[ 589.922648] env[68244]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68244) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 589.922985] env[68244]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68244) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 589.923132] env[68244]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68244) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 589.923436] env[68244]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 590.024388] env[68244]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68244) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 590.033888] env[68244]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68244) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 590.077301] env[68244]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 590.636407] env[68244]: INFO nova.virt.driver [None req-90f0bf55-8c76-49e4-a91b-b180270bb59e None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 590.708522] env[68244]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 590.708720] env[68244]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 590.708827] env[68244]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68244) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 593.599098] env[68244]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-1398fb98-79d0-400d-94db-5d4e1a7a1f49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.615016] env[68244]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68244) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 593.615175] env[68244]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-5a19faf6-c2bf-478c-ae2e-c8088eed6644 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.649915] env[68244]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 1eacb.
[ 593.650070] env[68244]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.941s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 593.650608] env[68244]: INFO nova.virt.vmwareapi.driver [None req-90f0bf55-8c76-49e4-a91b-b180270bb59e None None] VMware vCenter version: 7.0.3
[ 593.653999] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3644097-c7e4-454d-a3b5-e87cfc0b6bfe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.670980] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d7d724-9e30-4c84-8b2e-5f53bd60878b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.676722] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c863448a-fa74-4c56-b922-25f853e9998e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.683159] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb26218-764c-41a1-bb45-78152ef913b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.695933] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0828be7-0a41-44df-b511-5d86ecb685c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.701744] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794fd050-c7f2-4d8d-8362-33c38c08cfb0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.731337] env[68244]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-fcd30647-915f-4dba-a3df-5e043b3280f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 593.736574] env[68244]: DEBUG nova.virt.vmwareapi.driver [None req-90f0bf55-8c76-49e4-a91b-b180270bb59e None None] Extension org.openstack.compute already exists. {{(pid=68244) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 593.739221] env[68244]: INFO nova.compute.provider_config [None req-90f0bf55-8c76-49e4-a91b-b180270bb59e None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 594.242228] env[68244]: DEBUG nova.context [None req-90f0bf55-8c76-49e4-a91b-b180270bb59e None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),9b58929f-7d2c-4017-99d0-181c28c8332b(cell1) {{(pid=68244) load_cells /opt/stack/nova/nova/context.py:464}}
[ 594.244285] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 594.244517] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 594.245225] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 594.245648] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Acquiring lock "9b58929f-7d2c-4017-99d0-181c28c8332b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 594.245836] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Lock "9b58929f-7d2c-4017-99d0-181c28c8332b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 594.246858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Lock "9b58929f-7d2c-4017-99d0-181c28c8332b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 594.267337] env[68244]: INFO dbcounter [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Registered counter for database nova_cell0
[ 594.275503] env[68244]: INFO dbcounter [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Registered counter for database nova_cell1
[ 594.278856] env[68244]: DEBUG oslo_db.sqlalchemy.engines [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68244) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 594.279221] env[68244]: DEBUG oslo_db.sqlalchemy.engines [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68244) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 594.284375] env[68244]: ERROR nova.db.main.api [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 594.284375] env[68244]: result = function(*args, **kwargs)
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 594.284375] env[68244]: return func(*args, **kwargs)
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 594.284375] env[68244]: result = fn(*args, **kwargs)
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 594.284375] env[68244]: return f(*args, **kwargs)
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 594.284375] env[68244]: return db.service_get_minimum_version(context, binaries)
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 594.284375] env[68244]: _check_db_access()
[ 594.284375] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 594.284375] env[68244]: stacktrace = ''.join(traceback.format_stack())
[ 594.284375] env[68244]:
[ 594.285089] env[68244]: ERROR nova.db.main.api [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 594.285089] env[68244]: result = function(*args, **kwargs)
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 594.285089] env[68244]: return func(*args, **kwargs)
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 594.285089] env[68244]: result = fn(*args, **kwargs)
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 594.285089] env[68244]: return f(*args, **kwargs)
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 594.285089] env[68244]: return db.service_get_minimum_version(context, binaries)
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 594.285089] env[68244]: _check_db_access()
[ 594.285089] env[68244]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 594.285089] env[68244]: stacktrace = ''.join(traceback.format_stack())
[ 594.285089] env[68244]:
[ 594.285480] env[68244]: WARNING nova.objects.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 594.285598] env[68244]: WARNING nova.objects.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Failed to get minimum service version for cell 9b58929f-7d2c-4017-99d0-181c28c8332b
[ 594.286023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Acquiring lock "singleton_lock" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 594.286595] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Acquired lock "singleton_lock" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 594.286595] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Releasing lock "singleton_lock" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 594.286737] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Full set of CONF: {{(pid=68244) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}}
[ 594.286882] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ******************************************************************************** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 594.287017] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] Configuration options gathered from: {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 594.287157] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 594.287344] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 594.287471] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ================================================================================ {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
allow_resize_to_same_host = True
arq_binding_timeout = 300
backdoor_port = None
backdoor_socket = None
block_device_allocate_retries = 60
block_device_allocate_retries_interval = 3
cert = self.pem
compute_driver = vmwareapi.VMwareVCDriver
compute_monitors = []
config_dir = []
config_drive_format = iso9660
config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf']
config_source = []
console_host = devstack
control_exchange = nova
cpu_allocation_ratio = None
daemon = False
debug = True
default_access_ip_network_name = None
default_availability_zone = nova
default_ephemeral_format = None
default_green_pool_size = 1000
default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO']
default_schedule_zone = None
disk_allocation_ratio = None
enable_new_services = True
enabled_apis = ['osapi_compute']
enabled_ssl_apis = []
flat_injected = False
force_config_drive = False
force_raw_images = True
graceful_shutdown_timeout = 5
heal_instance_info_cache_interval = -1
host = cpu-1
initial_cpu_allocation_ratio = 4.0
initial_disk_allocation_ratio = 1.0
initial_ram_allocation_ratio = 1.0
injected_network_template = /opt/stack/nova/nova/virt/interfaces.template
instance_build_timeout = 0
instance_delete_interval = 300
instance_format = [instance: %(uuid)s]
instance_name_template = instance-%08x
instance_usage_audit = False
instance_usage_audit_period = month
instance_uuid_format = [instance: %(uuid)s]
instances_path = /opt/stack/data/nova/instances
internal_service_availability_zone = internal
key = None
live_migration_retry_count = 30
log_color = False
log_config_append = None
log_date_format = %Y-%m-%d %H:%M:%S
log_dir = None
log_file = None
log_options = True
log_rotate_interval = 1
log_rotate_interval_type = days
log_rotation_type = none
logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s
logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}}
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s
logging_exception_prefix = ERROR %(name)s %(instance)s
logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s
long_rpc_timeout = 1800
max_concurrent_builds = 10
max_concurrent_live_migrations = 1
max_concurrent_snapshots = 5
max_local_block_devices = 3
max_logfile_count = 30
max_logfile_size_mb = 200
maximum_instance_delete_attempts = 5
metadata_listen = 0.0.0.0
metadata_listen_port = 8775
metadata_workers = 2
migrate_max_retries = -1
mkisofs_cmd = genisoimage
my_block_storage_ip = 10.180.1.21
my_ip = 10.180.1.21
my_shared_fs_storage_ip = 10.180.1.21
network_allocate_retries = 0
non_inheritable_image_properties = ['cache_in_nova', 'bittorrent']
osapi_compute_listen = 0.0.0.0
osapi_compute_listen_port = 8774
osapi_compute_unique_server_name_scope =
osapi_compute_workers = 2
password_length = 12
periodic_enable = True
periodic_fuzzy_delay = 60
pointer_model = usbtablet
preallocate_images = none
publish_errors = False
pybasedir = /opt/stack/nova
ram_allocation_ratio = None
rate_limit_burst = 0
rate_limit_except_level = CRITICAL
rate_limit_interval = 0
reboot_timeout = 0
reclaim_instance_interval = 0
record = None
reimage_timeout_per_gb = 60
report_interval = 120
rescue_timeout = 0
reserved_host_cpus = 0
reserved_host_disk_mb = 0
reserved_host_memory_mb = 512
reserved_huge_pages = None
resize_confirm_window = 0
resize_fs_using_block_device = False
resume_guests_state_on_host_boot = False
rootwrap_config = /etc/nova/rootwrap.conf
rpc_response_timeout = 60
run_external_periodic_tasks = True
running_deleted_instance_action = reap
running_deleted_instance_poll_interval = 1800
running_deleted_instance_timeout = 0
scheduler_instance_sync_interval = 120
service_down_time = 720
servicegroup_driver = db
shell_completion = None
shelved_offload_time = 0
shelved_poll_interval = 3600
shutdown_timeout = 0
source_is_ipv6 = False
ssl_only = False
state_path = /opt/stack/data/n-cpu-1
sync_power_state_interval = 600
sync_power_state_pool_size = 1000
syslog_log_facility = LOG_USER
tempdir = None
timeout_nbd = 10
transport_url = ****
update_resources_interval = 0
use_cow_images = True
use_journal = False
use_json = False
use_rootwrap_daemon = False
use_stderr = False
use_syslog = False
vcpu_pin_set = None
vif_plugging_is_fatal = True
vif_plugging_timeout = 300
virt_mkfs = []
volume_usage_poll_interval = 0
watch_log_file = False
web = /usr/share/spice-html5
os_brick.lock_path = /opt/stack/data/n-cpu-1
os_brick.wait_mpath_device_attempts = 4
os_brick.wait_mpath_device_interval = 1
oslo_concurrency.disable_process_locking = False
oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1
oslo_messaging_metrics.metrics_buffer_size = 1000
oslo_messaging_metrics.metrics_enabled = False
oslo_messaging_metrics.metrics_process_name =
oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock
oslo_messaging_metrics.metrics_thread_stop_timeout = 10
api.auth_strategy = keystone
api.compute_link_prefix = None
api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01
api.dhcp_domain = novalocal
api.enable_instance_password = True
api.glance_link_prefix = None
api.instance_list_cells_batch_fixed_size = 100
api.instance_list_cells_batch_strategy = distributed
api.instance_list_per_project_cells = False
api.list_records_by_skipping_down_cells = True
api.local_metadata_per_cell = False
api.max_limit = 1000
api.metadata_cache_expiration = 15
api.neutron_default_tenant_id = default
api.response_validation = warn
api.use_neutron_default_nets = False
api.vendordata_dynamic_connect_timeout = 5
api.vendordata_dynamic_failure_fatal = False
api.vendordata_dynamic_read_timeout = 5
api.vendordata_dynamic_ssl_certfile =
api.vendordata_dynamic_targets = []
api.vendordata_jsonfile_path = None
api.vendordata_providers = ['StaticJSON']
cache.backend = dogpile.cache.memcached
cache.backend_argument = ****
cache.backend_expiration_time = None
cache.config_prefix = cache.oslo
cache.dead_timeout = 60.0
cache.debug_cache_backend = False
cache.enable_retry_client = False
cache.enable_socket_keepalive = False
cache.enabled = True
cache.enforce_fips_mode = False
cache.expiration_time = 600
{{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751040] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.hashclient_retry_attempts = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751212] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.hashclient_retry_delay = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751376] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_dead_retry = 300 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751534] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_password = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751697] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.751857] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752027] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_pool_maxsize = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752195] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752358] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_sasl_enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752533] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752707] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_socket_timeout = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.752855] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.memcache_username = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753024] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.proxies = [] {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753193] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_db = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753351] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_password = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753518] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_sentinel_service_name = mymaster {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753689] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.753856] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_server = localhost:6379 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754046] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_socket_timeout = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754254] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.redis_username = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754428] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.retry_attempts = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754595] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.retry_delay = 0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754759] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.socket_keepalive_count = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.754922] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.socket_keepalive_idle = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755098] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.socket_keepalive_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755262] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.tls_allowed_ciphers = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755422] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.tls_cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755579] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.tls_certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755740] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.tls_enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.755899] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cache.tls_keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.756084] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.756325] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.auth_type = password {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.756500] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.756679] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.catalog_info = volumev3::publicURL {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.756839] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757009] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757182] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.cross_az_attach = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757345] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.debug = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757508] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.endpoint_template = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757672] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.http_retries = 3 {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757834] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.757991] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758176] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.os_region_name = RegionOne {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758340] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758499] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cinder.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758667] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758825] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.cpu_dedicated_set = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.758982] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.cpu_shared_set = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759161] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.image_type_exclude_list = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759324] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759489] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.max_concurrent_disk_ops = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759652] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.max_disk_devices_to_attach = -1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759817] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.759986] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.760165] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.resource_provider_association_refresh = 300 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.760334] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.760500] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.shutdown_retry_interval = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.760680] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.760860] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] conductor.workers = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761048] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] console.allowed_origins = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761212] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] console.ssl_ciphers = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761385] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] console.ssl_minimum_version = default {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761553] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] consoleauth.enforce_session_timeout = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761721] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] consoleauth.token_ttl = 600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.761891] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762060] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.certfile = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762228] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762393] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762554] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762716] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.762876] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763043] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763209] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763370] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763528] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763685] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.763843] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764036] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.service_type = accelerator {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764219] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764382] env[68244]: DEBUG oslo_service.backend.eventlet.service 
[None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764542] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764701] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.764882] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765054] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] cyborg.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765230] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.asyncio_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765393] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.asyncio_slave_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765563] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.backend = sqlalchemy {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765732] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.765897] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.connection_debug = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766077] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.connection_parameters = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766273] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.connection_recycle_time = 3600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766442] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.connection_trace = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766604] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.db_inc_retry_interval = 
True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766769] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.db_max_retries = 20 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.766933] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.db_max_retry_interval = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767124] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.db_retry_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767308] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.max_overflow = 50 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767476] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.max_pool_size = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767639] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.max_retries = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767807] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.767967] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.mysql_wsrep_sync_wait = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.768148] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.pool_timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.768315] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.retry_interval = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.768475] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.slave_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.768635] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.sqlite_synchronous = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.768797] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] database.use_db_reconnect = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
594.768964] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.asyncio_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769137] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.asyncio_slave_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769310] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.backend = sqlalchemy {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769477] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769640] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.connection_debug = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769807] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.connection_parameters = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.769971] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.connection_recycle_time = 3600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770149] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.connection_trace = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770313] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.db_inc_retry_interval = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770478] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.db_max_retries = 20 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770643] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.db_max_retry_interval = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770807] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.db_retry_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.770970] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.max_overflow = 50 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771146] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.max_pool_size = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771310] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.max_retries = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771478] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771640] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771799] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.pool_timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.771959] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.retry_interval = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772131] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.slave_connection = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772296] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] api_database.sqlite_synchronous = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772471] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] devices.enabled_mdev_types = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772646] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772818] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ephemeral_storage_encryption.default_format = luks {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.772979] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ephemeral_storage_encryption.enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773157] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773333] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.api_servers = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773499] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773660] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773823] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.773989] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.774187] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.774355] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.debug = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.774523] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.default_trusted_certificate_ids = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.774687] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.enable_certificate_validation = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.774851] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.enable_rbd_download = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775018] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775191] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775354] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775514] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.max_version = None {{(pid=68244) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775673] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.775834] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.num_retries = 3 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776008] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.rbd_ceph_conf = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776208] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.rbd_connect_timeout = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776388] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.rbd_pool = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776559] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.rbd_user = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776727] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.776889] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777059] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777260] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.service_type = image {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777432] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777593] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777751] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.777913] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778107] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778279] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.verify_glance_signatures = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778444] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] glance.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778609] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] guestfs.debug = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778776] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.778941] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.auth_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779114] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779278] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779441] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779600] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779759] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.779915] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780088] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.insecure = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780252] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780454] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780638] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780801] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.780962] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781137] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781310] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.service_type = shared-file-system {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781478] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.share_apply_policy_timeout = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781641] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781802] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.781961] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.782136] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.782319] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.782481] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] manila.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.782649] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] mks.enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783426] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783426] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.manager_interval = 2400 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783426] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.precache_concurrency = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783552] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.remove_unused_base_images = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783686] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.783853] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784055] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] image_cache.subdirectory_name = _base {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784249] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.api_max_retries = 60 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784418] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.api_retry_interval = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784578] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784741] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.auth_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.784901] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785071] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785238] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785404] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.conductor_group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785562] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785720] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.785876] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786056] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786248] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786415] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786574] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786737] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.peer_list = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.786894] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787062] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.retriable_status_codes = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787254] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.serial_console_state_timeout = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787418] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787586] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.service_type = baremetal {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787747] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.shard = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.787909] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788080] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788242] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788405] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788584] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788744] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ironic.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.788924] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789109] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] key_manager.fixed_key = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789297] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789459] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.barbican_api_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789619] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.barbican_endpoint = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789791] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.barbican_endpoint_type = public {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.789951] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.barbican_region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790128] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790291] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790456] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790614] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790773] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.790938] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.number_of_retries = 60 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791114] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.retry_delay = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791282] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.send_service_user_token = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791443] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791599] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791756] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.verify_ssl = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.791910] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican.verify_ssl_path = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792085] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792255] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.auth_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792415] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792572] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792734] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.792896] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793064] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793231] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793393] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] barbican_service_user.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793558] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.approle_role_id = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793717] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.approle_secret_id = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.793886] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.kv_mountpoint = secret {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794078] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.kv_path = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794259] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.kv_version = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794423] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.namespace = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794581] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.root_token_id = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794739] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.ssl_ca_crt_file = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.794905] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.timeout = 60.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795080] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.use_ssl = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795260] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795430] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795589] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795752] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.795911] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.connect_retries = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796080] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796264] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796430] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796589] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796744] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.796900] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797070] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797236] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797395] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797564] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.service_type = identity {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797725] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.797883] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.798051] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.798212] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.798395] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.798552] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] keystone.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.798740] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.ceph_mount_options = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799071] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799256] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.connection_uri = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799422] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_mode = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799588] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_model_extra_flags = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799757] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_models = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.799925] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_power_governor_high = performance {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800105] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_power_governor_low = powersave {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800272] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_power_management = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800440] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800609] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.device_detach_attempts = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800773] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.device_detach_timeout = 20 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.800939] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.disk_cachemodes = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801110] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.disk_prefix = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801278] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.enabled_perf_events = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801441] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.file_backed_memory = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801607] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.gid_maps = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801765] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.hw_disk_discard = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.801922] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.hw_machine_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802101] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_rbd_ceph_conf = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802269] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802431] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802602] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_rbd_glance_store_name = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802763] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_rbd_pool = rbd 
{{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.802930] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_type = default {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803100] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.images_volume_group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803266] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.inject_key = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803427] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.inject_partition = -2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803589] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.inject_password = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803748] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.iscsi_iface = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.803907] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.iser_use_multipath = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804121] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_bandwidth = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804308] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804473] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_downtime = 500 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804635] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804796] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.804955] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_inbound_addr = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805130] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805296] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_permit_post_copy = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805455] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_scheme = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805628] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_timeout_action = abort {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805788] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_tunnelled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.805945] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_uri = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.806134] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.live_migration_with_native_tls = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.806320] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.max_queues = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.806490] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.806820] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.806993] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.nfs_mount_options = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.807498] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.807498] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68244) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.807636] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_iser_scan_tries = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.807800] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_memory_encrypted_guests = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.807965] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.808144] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_pcie_ports = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.808315] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.num_volume_scan_tries = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.808480] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.pmem_namespaces = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.808638] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.quobyte_client_cfg = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.808923] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809112] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rbd_connect_timeout = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809285] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809451] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809613] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rbd_secret_uuid = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809772] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rbd_user = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.809936] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810123] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.remote_filesystem_transport = ssh {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810287] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rescue_image_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810445] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rescue_kernel_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810604] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rescue_ramdisk_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810772] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.810934] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.rx_queue_size = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.811117] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.smbfs_mount_options = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.811424] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.811605] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.snapshot_compression = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.811772] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.snapshot_image_format = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.811998] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.812183] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.sparse_logical_volumes = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.812351] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.swtpm_enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.812519] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.swtpm_group = tss {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.812686] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.swtpm_user = tss {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.812853] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.sysinfo_serial = unique {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813021] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.tb_cache_size = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813188] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.tx_queue_size = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813353] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.uid_maps = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813513] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.use_virtio_for_bridges = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813681] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.virt_type = kvm {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.813847] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.volume_clear = zero {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814038] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.volume_clear_size = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814248] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.volume_enforce_multipath = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814431] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.volume_use_multipath = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814595] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_cache_path = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814766] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.814936] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_mount_group = qemu {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.815123] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_mount_opts = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.815340] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.815590] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.815774] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.vzstorage_mount_user = stack {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.815942] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816148] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816339] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.auth_type = password {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816503] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816662] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816823] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.816979] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817151] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817323] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.default_floating_pool = public {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817480] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817643] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.extension_sync_interval = 600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817804] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.http_retries = 3 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.817967] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.818141] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.818332] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.818510] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.818669] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.818835] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.ovs_bridge = br-int {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819007] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.physnets = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819185] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.region_name = RegionOne 
{{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819349] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819515] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.service_metadata_proxy = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819673] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819839] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.service_type = network {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.819999] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.820172] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.820333] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.820493] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.820829] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.820829] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] neutron.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821011] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.bdms_in_notifications = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821197] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.default_level = INFO {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821365] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.include_share_mapping = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821539] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.notification_format = unversioned {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821701] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.notify_on_state_change = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.821874] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822061] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] pci.alias = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822239] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] pci.device_spec = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822407] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] pci.report_in_placement = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822579] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822751] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.auth_type = password {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.822917] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823088] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823252] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823413] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823571] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.connect_retries = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823726] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.823884] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.default_domain_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824073] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.default_domain_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824269] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.domain_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824436] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.domain_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824596] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.endpoint_override = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824758] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.824915] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825086] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825249] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825436] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.password = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825579] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.project_domain_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825744] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.project_domain_name = Default {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.825909] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.project_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826094] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.project_name = service {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826288] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.region_name = RegionOne {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826455] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826618] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826786] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.service_type = placement {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.826948] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827123] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.status_code_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827286] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827444] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.system_scope = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827601] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827759] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.trust_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.827916] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.user_domain_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828095] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] 
placement.user_domain_name = Default {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828261] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.user_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828434] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.username = nova {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828630] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828764] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] placement.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.828940] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.cores = 20 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829116] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.count_usage_from_placement = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829289] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829454] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.injected_file_content_bytes = 10240 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829617] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.injected_file_path_length = 255 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829780] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.injected_files = 5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.829941] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.instances = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.830117] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.key_pairs = 100 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.830305] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.metadata_items = 128 {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.830483] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.ram = 51200 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.830646] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.recheck_quota = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.830809] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.server_group_members = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831164] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.server_groups = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831226] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831363] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] quota.unified_limits_resource_strategy = require {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831538] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831700] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.831862] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.image_metadata_prefilter = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832035] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832205] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.max_attempts = 3 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832370] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.max_placement_results = 1000 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832534] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832696] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.query_placement_for_image_type_support = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.832857] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833038] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] scheduler.workers = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833221] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833393] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833572] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833739] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.833903] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834108] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834291] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834482] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834650] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834818] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.834978] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835159] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835326] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835533] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835665] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.isolated_hosts = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835824] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.isolated_images = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.835987] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.836186] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.836361] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.836522] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.pci_in_placement = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.836684] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.836846] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837015] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837219] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837396] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837563] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837724] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.track_instance_changes = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.837898] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.838080] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] metrics.required = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.838249] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] metrics.weight_multiplier = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.838417] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.838583] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] metrics.weight_setting = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.838893] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839080] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.enabled = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839259] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.port_range = 10000:20000 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839432] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839600] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839771] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] serial_console.serialproxy_port = 6083 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.839940] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.840134] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.auth_type = password {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.840350] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.840522] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.840691] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.840854] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841022] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841204] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.send_service_user_token = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841372] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.split_loggers = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841568] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] service_user.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841767] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.agent_enabled = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.841966] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.842300] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.842507] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.842682] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.html5proxy_port = 6082 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.842845] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.image_compression = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843025] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.jpeg_compression = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843189] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.playback_compression = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843355] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.require_secure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843523] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.server_listen = 127.0.0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843691] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.843968] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.844181] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.streaming_mode = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.844351] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] spice.zlib_compression = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.844519] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] upgrade_levels.baseapi = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.844691] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] upgrade_levels.compute = auto {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.844853] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] upgrade_levels.conductor = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845021] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] upgrade_levels.scheduler = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845212] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845401] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.auth_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845563] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845722] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.845882] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846052] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846247] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.keyfile = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846419] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846579] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vendordata_dynamic_auth.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846754] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.api_retry_count = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.846917] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.ca_file = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847102] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.cache_prefix = devstack-image-cache {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847284] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.cluster_name = testcl1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847447] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.connection_pool_size = 10 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847609] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.console_delay_seconds = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847777] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.datastore_regex = ^datastore.* {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.847989] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.848181] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.host_password = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.848354] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.host_port = 443 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.848524] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.host_username = administrator@vsphere.local {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.848695] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.insecure = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.848856] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.integration_bridge = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849029] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.maximum_objects = 100 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849195] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.pbm_default_policy = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849358] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.pbm_enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849515] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.pbm_wsdl_location = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849682] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849840] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.serial_port_proxy_uri = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.849997] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.serial_port_service_uri = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.850178] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.task_poll_interval = 0.5 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.850352] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.use_linked_clone = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.850519] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.vnc_keymap = en-us {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.850683] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.vnc_port = 5900 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.850844] env[68244]: DEBUG 
oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vmware.vnc_port_total = 10000 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.851037] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.auth_schemes = ['none'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.851217] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.851506] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.851687] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.851858] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.novncproxy_port = 6080 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852054] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.server_listen = 127.0.0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852238] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852401] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.vencrypt_ca_certs = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852559] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.vencrypt_client_cert = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852714] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vnc.vencrypt_client_key = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.852891] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853064] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_deep_image_inspection = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853227] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853389] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853547] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853706] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.disable_rootwrap = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.853867] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.enable_numa_live_migration = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854048] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854228] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854388] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854548] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.libvirt_disable_apic = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854706] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.854866] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855035] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855202] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855362] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855520] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855678] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855837] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.855992] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.856193] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.856384] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.856552] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.client_socket_timeout = 900 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.856717] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.default_pool_size = 1000 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.856884] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.keep_alive = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857059] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.max_header_line = 16384 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857275] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.secure_proxy_ssl_header = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857447] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.ssl_ca_file = None 
{{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857608] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.ssl_cert_file = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857768] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.ssl_key_file = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.857932] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.tcp_keepidle = 600 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.858118] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.858338] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] zvm.ca_file = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.858522] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] zvm.cloud_connector_url = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.858956] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.859155] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] zvm.reachable_timeout = 300 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.859335] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.859512] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.859689] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.connection_string = messaging:// {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.859857] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.enabled = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860035] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] 
profiler.es_doc_type = notification {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860205] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.es_scroll_size = 10000 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860373] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.es_scroll_time = 2m {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860536] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.filter_error_trace = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860703] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.hmac_keys = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.860870] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.sentinel_service_name = mymaster {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861044] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.socket_timeout = 0.1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861215] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.trace_requests = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861378] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler.trace_sqlalchemy = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861611] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler_jaeger.process_tags = {} {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861793] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler_jaeger.service_name_prefix = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.861960] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] profiler_otlp.service_name_prefix = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862143] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] remote_debug.host = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862308] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] remote_debug.port = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862485] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862649] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862811] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.862973] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863149] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863314] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863473] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863636] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863797] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.863968] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.864169] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.864350] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.864519] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.864683] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.864846] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865024] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865193] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865357] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865527] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865689] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.865876] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866096] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866300] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866471] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866636] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68244) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866798] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.866959] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867137] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867300] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867462] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867624] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867794] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.867963] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.868142] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.868314] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.868483] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.ssl_version = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.868648] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.868833] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869009] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_notifications.retry = -1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869223] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869412] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_messaging_notifications.transport_url = **** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869588] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.auth_section = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869773] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.auth_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.869940] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.cafile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870114] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.certfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870289] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.collect_timing = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870449] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.connect_retries = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870607] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.connect_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870765] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_id = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.870935] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_interface = publicURL {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871107] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_override = 
None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871268] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871426] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871582] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.endpoint_service_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871742] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.insecure = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.871900] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.keyfile = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872070] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.max_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872233] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.min_version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872392] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.region_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872550] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.retriable_status_codes = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872707] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.service_name = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.872862] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.service_type = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873031] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.split_loggers = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873195] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.status_code_retries = None {{(pid=68244) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873353] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.status_code_retry_delay = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873509] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.timeout = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873667] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.valid_interfaces = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873821] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_limit.version = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.873994] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_reports.file_event_handler = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.874195] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.874362] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] oslo_reports.log_dir = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.874533] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.874696] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.874856] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875033] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875203] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875366] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875537] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875695] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.875853] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876026] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876217] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876383] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] vif_plug_ovs_privileged.user = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876555] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.flat_interface = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876736] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.876909] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877104] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877294] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877469] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877637] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877800] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.877984] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.878171] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.isolate_vif = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.878348] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.878514] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.878683] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.878849] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.ovsdb_interface = native {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879015] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] os_vif_ovs.per_port_bridge = False {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879193] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.capabilities = [21] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879355] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879512] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.helper_command = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879676] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879838] env[68244]: DEBUG oslo_service.backend.eventlet.service [None 
req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.879995] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] privsep_osbrick.user = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.880242] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.880376] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.group = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.880532] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.helper_command = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.880693] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.880855] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.881024] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] nova_sys_admin.user = None {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 594.881152] env[68244]: DEBUG oslo_service.backend.eventlet.service [None req-5103386b-8566-48a5-89e3-5fd71f9b224e None None] ******************************************************************************** {{(pid=68244) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 594.881586] env[68244]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 595.385406] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Getting list of instances from cluster (obj){ [ 595.385406] env[68244]: value = "domain-c8" [ 595.385406] env[68244]: _type = "ClusterComputeResource" [ 595.385406] env[68244]: } {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 595.386566] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155671ac-4a00-486f-abf7-7880515f6d5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.395780] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Got total of 0 instances {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 595.396415] env[68244]: WARNING nova.virt.vmwareapi.driver [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 595.396889] env[68244]: INFO nova.virt.node [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Generated node identity b885cb16-3bd4-46d8-abd9-28a1bf1058e3 [ 595.397140] env[68244]: INFO nova.virt.node [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Wrote node identity b885cb16-3bd4-46d8-abd9-28a1bf1058e3 to /opt/stack/data/n-cpu-1/compute_id [ 595.900068] env[68244]: WARNING nova.compute.manager [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Compute nodes ['b885cb16-3bd4-46d8-abd9-28a1bf1058e3'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 596.906237] env[68244]: INFO nova.compute.manager [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 597.912132] env[68244]: WARNING nova.compute.manager [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 597.912459] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.912589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.912737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.912893] env[68244]: DEBUG nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 597.913864] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b83679-2fe9-4b68-adf9-c8cf498bfc56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.922225] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170bcc1f-8e97-45e6-bc0c-11075e7a51d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.935520] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275e94eb-5340-4911-b466-28b0f40990e9 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.942117] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e994c6c5-7132-497d-b067-12b8759bd2de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.970988] env[68244]: DEBUG nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180962MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 597.971129] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.971350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.474670] env[68244]: WARNING nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] No compute node record for cpu-1:b885cb16-3bd4-46d8-abd9-28a1bf1058e3: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host b885cb16-3bd4-46d8-abd9-28a1bf1058e3 could not be found. [ 598.978984] env[68244]: INFO nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 [ 600.487260] env[68244]: DEBUG nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 600.487635] env[68244]: DEBUG nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 600.648200] env[68244]: INFO nova.scheduler.client.report [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] [req-f016a378-83a8-4581-ba10-4e8fb64ec798] Created resource provider record via placement API for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
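Editor's note: the resource-tracker records immediately around this point first report the raw hypervisor view (free_ram=180962MB, free_disk=177GB, free_vcpus=48) and then, in the records that follow, push a VCPU/MEMORY_MB/DISK_GB inventory with reserved amounts and allocation ratios to the Placement API. As a reading aid only, the following is a minimal, hypothetical Python sketch of how Placement-style schedulable capacity is commonly derived from those three fields; it is not Nova's or Placement's actual code, and the numeric values are simply copied from the surrounding log records.

# Sketch only (assumption: capacity = (total - reserved) * allocation_ratio,
# the usual Placement capacity formula; not taken from the Nova source).
def schedulable_capacity(total, reserved, allocation_ratio):
    return int((total - reserved) * allocation_ratio)

# Inventory values as logged for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3:
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable units on this node
    print(rc, schedulable_capacity(inv["total"], inv["reserved"], inv["allocation_ratio"]))

Under this reading, the node advertises 192 schedulable vCPUs, 196078 MB of RAM and 400 GB of disk, which is consistent with the inventory dictionaries logged below. End of editor's note; the log continues unmodified.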
[ 600.664759] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0697c956-89bc-459f-8f72-f3710da36d15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.672761] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b90fb09-2c02-4887-823d-b97e8a0a8d2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.703906] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc8a3fa-a212-4b95-a5b2-abea639b4c58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.711469] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d8ffc5-fd74-4ee6-a3ad-68917e1828a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.724460] env[68244]: DEBUG nova.compute.provider_tree [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.261159] env[68244]: DEBUG nova.scheduler.client.report [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 601.261377] env[68244]: DEBUG nova.compute.provider_tree [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 0 to 1 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 601.261562] env[68244]: DEBUG nova.compute.provider_tree [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.315830] env[68244]: DEBUG nova.compute.provider_tree [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Updating 
resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 1 to 2 during operation: update_traits {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 601.820462] env[68244]: DEBUG nova.compute.resource_tracker [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 601.820835] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.849s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.820896] env[68244]: DEBUG nova.service [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Creating RPC server for service compute {{(pid=68244) start /opt/stack/nova/nova/service.py:186}} [ 601.835309] env[68244]: DEBUG nova.service [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] Join ServiceGroup membership for this service compute {{(pid=68244) start /opt/stack/nova/nova/service.py:203}} [ 601.835531] env[68244]: DEBUG nova.servicegroup.drivers.db [None req-a1c299c5-9cc0-4c66-b24f-48750fa62f0e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68244) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 610.838314] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.342112] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Getting list of instances from cluster (obj){ [ 611.342112] env[68244]: value = "domain-c8" [ 611.342112] env[68244]: _type = "ClusterComputeResource" [ 611.342112] env[68244]: } {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 611.343366] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1099bc-7f08-4caa-a636-7cf33cc4dcee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.352987] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Got total of 0 instances {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 611.353230] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.353524] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Getting list of instances from cluster (obj){ [ 611.353524] env[68244]: value = "domain-c8" [ 611.353524] env[68244]: _type = "ClusterComputeResource" [ 611.353524] env[68244]: } {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 611.354379] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f20c297-db97-4f2c-8539-f79e04d4a94d 
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.361293] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Got total of 0 instances {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 637.641601] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.641926] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.144575] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.697955] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.697955] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.700745] env[68244]: INFO nova.compute.claims [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.771048] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5b593b-4628-4165-a79a-639c179fe187 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.782109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3296ef73-a1b5-4d27-b775-36879ad155d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.821939] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab4710f-6594-48ae-a7e4-347c8fb3605a {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.829616] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e5c111-2208-4b22-b4b1-f5502dd1ae95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.843920] env[68244]: DEBUG nova.compute.provider_tree [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.860247] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "47330950-506d-41c7-b564-30f46a7025a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.860447] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.347318] env[68244]: DEBUG nova.scheduler.client.report [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 640.365636] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 640.855456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.855860] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 640.893131] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.893482] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.895428] env[68244]: INFO nova.compute.claims [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.074151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.074208] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.362235] env[68244]: DEBUG nova.compute.utils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 641.366963] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 641.366963] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 641.434468] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "57504eac-0d7f-4fbe-b08c-6864713cca94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.434904] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.576870] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.644419] env[68244]: DEBUG nova.policy [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c045d7bbe53416f9bdc97493a461a53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2412be643e4d49f593d97f505d19a6c2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.870770] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 641.937245] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.945825] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.945961] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.026757] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa3f4b3-e490-4ffc-bf56-496c93cbbadc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.036786] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f873d6b8-54f9-4ac5-a1f7-3e96ba1a10b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.070507] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4618366-f171-404c-aed4-cbb4cf266610 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.080254] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461d0e8d-22f9-4c6b-8102-1b5f647c6993 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.099055] env[68244]: DEBUG nova.compute.provider_tree [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.108881] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "c662b964-abc9-41af-85fd-ea1a540e1e23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.109613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.111839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc 
tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.440251] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "aebd1200-ae52-4537-a677-24b57b581517" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.440866] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.455441] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.478020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.601895] env[68244]: DEBUG nova.scheduler.client.report [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.614352] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.883036] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Successfully created port: a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.887197] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 642.931938] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.932210] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.932392] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.932533] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.932675] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.932822] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.933308] env[68244]: DEBUG 
nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.933308] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.933592] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.933760] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.933928] env[68244]: DEBUG nova.virt.hardware [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.934849] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783f44ad-de42-4e28-83e5-7d5e7b2c9af3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.945400] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.954345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c8200b-65b3-4c96-a5da-318b54af484a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.991502] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d6bb8c-41e8-4af1-b989-14982f14ceba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.009022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.073749] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.073979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.109637] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.110276] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 643.114072] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.003s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.117165] env[68244]: INFO nova.compute.claims [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.162835] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.478163] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.577724] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.624564] env[68244]: DEBUG nova.compute.utils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 643.628062] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 643.628062] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 643.837941] env[68244]: DEBUG nova.policy [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a067eaee4c946e3a22d0f42412a04af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '944fef8d71194248967d332728d31002', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 644.119183] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.134684] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 644.381444] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc33e55d-fe1c-4b7b-9f11-7b1c4e2fe9ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.394295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c13f1d-9146-4016-9b0b-25e95cb57135 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.448192] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57ee3de-43d9-4835-9ac5-c004bd70e37a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.458889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d93e306-87c0-4e09-82c8-3f15b196d6e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.477799] env[68244]: DEBUG nova.compute.provider_tree [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.980582] env[68244]: DEBUG nova.scheduler.client.report [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 645.077904] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Successfully created port: 8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.141132] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.141475] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.154194] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 645.187452] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 645.187783] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.188194] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 645.188353] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.188554] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 645.188949] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 645.189241] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 645.189443] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 645.189824] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 645.190018] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 645.190195] env[68244]: DEBUG nova.virt.hardware [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 645.191127] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc190e1-b2d2-4014-8924-801ffabdd029 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.202534] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6045e96a-19fb-4b0b-beab-fa5b9d5c9e41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.489476] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.489476] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.491351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.013s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.492400] env[68244]: INFO nova.compute.claims [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.645804] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.971152] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Successfully updated port: a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 645.997293] env[68244]: DEBUG nova.compute.utils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 646.002197] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 646.002197] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 646.083726] env[68244]: DEBUG nova.policy [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dce40a2ad8a46dcaf896e92a010e6ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6004a8c148a4dc3a3a370d043407f3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 646.183185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.478553] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.478553] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquired lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.478553] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.503322] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 646.545773] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Successfully created port: a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.715741] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38565f5a-f75e-46f0-9c1f-4c83274056e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.724321] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74195ed5-4bf6-4f74-9ae5-575678d052aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.759948] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde3c953-dc59-40c9-932b-b8a1ac35404f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.767986] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb013d7-8872-457e-8d3a-6c835f3ce45d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.783166] env[68244]: DEBUG nova.compute.provider_tree [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.165656] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.286790] env[68244]: DEBUG nova.scheduler.client.report [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.319559] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Successfully updated port: 8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 647.367873] env[68244]: DEBUG nova.compute.manager [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Received event network-vif-plugged-a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 647.369037] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] Acquiring lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.369037] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.369037] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.369037] env[68244]: DEBUG nova.compute.manager [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] No waiting events found dispatching network-vif-plugged-a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 647.369037] env[68244]: WARNING nova.compute.manager [req-d1513122-0b7e-45ad-9e36-3938224b5c66 req-d3225893-a4ae-4f5c-8a1d-55591ad7c50b service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Received unexpected event network-vif-plugged-a9569b35-556f-4a24-9726-720578e04bf7 for instance with vm_state building and task_state spawning. 
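The repeated Acquiring lock "..." by "..." / Lock "..." acquired by "..." :: waited N s / Lock "..." "released" by "..." :: held N s triplets in these entries (for example the "f48156b9-...-events" lock just above, or the "compute_resources" claims throughout) are emitted by oslo.concurrency's lockutils wrapper, the inner function at lockutils.py:405/410/424 cited in each entry. The following is a minimal, illustrative sketch of the pattern that produces such triplets; the guarded function claim_resources and its body are hypothetical stand-ins, not Nova code.

```python
# Minimal sketch (not Nova code): any function wrapped with
# lockutils.synchronized() produces the "Acquiring lock ... /
# Lock ... acquired ... :: waited / Lock ... released ... :: held"
# DEBUG triplets seen in the log above when it is entered and exited.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)  # make the lockutils DEBUG lines visible

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # lock name as it appears in the log


@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def claim_resources(instance_uuid):
    # Hypothetical stand-in for the work done while the lock is held
    # (ResourceTracker.instance_claim in the entries above).
    time.sleep(0.1)
    return instance_uuid


if __name__ == "__main__":
    claim_resources("47330950-506d-41c7-b564-30f46a7025a7")
```

The waited/held durations reported in the log are simply the time spent blocking on the lock and the time spent inside the wrapped function, which is why long "held" values on "compute_resources" show up as long "waited" values for the next claim.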
[ 647.523587] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.570709] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.572806] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.572806] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.572806] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.577240] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.577429] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.577709] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.577940] env[68244]: DEBUG 
nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.578019] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.578191] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.578370] env[68244]: DEBUG nova.virt.hardware [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.579307] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605a6735-4369-4610-be92-5c57c277eb53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.589455] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5fad78-3f6c-4e3a-b87d-325d912ead68 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.597994] env[68244]: DEBUG nova.network.neutron [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Updating instance_info_cache with network_info: [{"id": "a9569b35-556f-4a24-9726-720578e04bf7", "address": "fa:16:3e:6f:d1:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9569b35-55", "ovs_interfaceid": "a9569b35-556f-4a24-9726-720578e04bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.796044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd 
tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.796044] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 647.797470] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.790s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.799378] env[68244]: INFO nova.compute.claims [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.823833] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.827978] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquired lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.827978] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.106148] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Releasing lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.106148] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Instance network_info: |[{"id": "a9569b35-556f-4a24-9726-720578e04bf7", "address": "fa:16:3e:6f:d1:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9569b35-55", "ovs_interfaceid": "a9569b35-556f-4a24-9726-720578e04bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 648.106319] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:d1:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9569b35-556f-4a24-9726-720578e04bf7', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.128750] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.131373] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03b35c49-ec32-4cb8-b249-85f71c0d1c49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.144406] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Created folder: OpenStack in parent group-v4. [ 648.144732] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating folder: Project (2412be643e4d49f593d97f505d19a6c2). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.145362] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49a2f445-fa68-4824-b69e-be8e2dc3434e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.158452] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Created folder: Project (2412be643e4d49f593d97f505d19a6c2) in parent group-v558876. 
[ 648.159103] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating folder: Instances. Parent ref: group-v558877. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 648.159579] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fa6a507-b16b-4e35-b022-f0d430e06135 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.171325] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Created folder: Instances in parent group-v558877. [ 648.171770] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 648.172158] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 648.172469] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51b8d4c7-945f-49d9-a8a4-ce186222711c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.195766] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 648.195766] env[68244]: value = "task-2779828" [ 648.195766] env[68244]: _type = "Task" [ 648.195766] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.205573] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779828, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.305859] env[68244]: DEBUG nova.compute.utils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 648.314825] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.315073] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.377178] env[68244]: DEBUG nova.policy [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '918658919a8c4d4e888f9a63053e5ffe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e80cfa81cd442f9af3bf027b9059123', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.417339] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.706491] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779828, 'name': CreateVM_Task, 'duration_secs': 0.345642} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.707449] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 648.727659] env[68244]: DEBUG oslo_vmware.service [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e6fa09-2528-49b8-860e-611fa05fa1cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.735190] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.735406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.736670] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquired external semaphore "[datastore2] 
devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 648.736967] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fd37239-2ef5-4dfb-8f23-b4f4bc5b3b4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.748942] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 648.748942] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52942d08-dd15-e339-2333-0d3ac3cd4849" [ 648.748942] env[68244]: _type = "Task" [ 648.748942] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.757957] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52942d08-dd15-e339-2333-0d3ac3cd4849, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.819158] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 648.936562] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Successfully updated port: a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.976507] env[68244]: DEBUG nova.network.neutron [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Updating instance_info_cache with network_info: [{"id": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "address": "fa:16:3e:a5:9c:45", "network": {"id": "5d647318-d591-470e-b599-6703584b707a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1158812997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944fef8d71194248967d332728d31002", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8289ebb7-2b", "ovs_interfaceid": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.998615] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb0a290-8097-4e36-a58a-5f5ec68637f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.007337] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9e620f-231f-4783-89b3-646c407b8491 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.046149] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1edb08-a987-4623-bac7-5c1f79566faf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.053643] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83e5216-b03f-4a5f-8ab2-0b3c1149cfd8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.067567] env[68244]: DEBUG nova.compute.provider_tree [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.237692] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Successfully created port: 0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.260455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.261178] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.261460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.262064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquired lock 
"[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.262524] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.263180] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b747430e-9267-403b-a4e3-a9135c447151 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.273596] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.273822] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 649.274762] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f6ccad-1f0f-49e8-b32f-a75b2e764eda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.284280] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-350154f8-19a2-484d-8937-0c9c52c6d658 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.291023] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 649.291023] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52928366-7e01-9a23-6bb0-9a53eb17d2d4" [ 649.291023] env[68244]: _type = "Task" [ 649.291023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.305955] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52928366-7e01-9a23-6bb0-9a53eb17d2d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.446173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.446173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.446173] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.481202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Releasing lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.481523] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Instance network_info: |[{"id": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "address": "fa:16:3e:a5:9c:45", "network": {"id": "5d647318-d591-470e-b599-6703584b707a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1158812997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944fef8d71194248967d332728d31002", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8289ebb7-2b", "ovs_interfaceid": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 649.481943] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:9c:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'da0e5087-d65b-416f-90fe-beaa9c534ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8289ebb7-2b25-4ad3-bda3-0609581f96be', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.495169] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Creating folder: Project (944fef8d71194248967d332728d31002). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.495169] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-155ceb4c-2aad-4c73-9853-f86130a25525 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.509144] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Created folder: Project (944fef8d71194248967d332728d31002) in parent group-v558876. [ 649.509144] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Creating folder: Instances. Parent ref: group-v558880. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.509144] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e21c9bfc-8458-4244-9944-0dbff2d66559 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.523156] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Created folder: Instances in parent group-v558880. [ 649.523156] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 649.523156] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.523156] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f49a40e1-b566-42fb-9646-fcf1ca8990ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.550089] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.550089] env[68244]: value = "task-2779831" [ 649.550089] env[68244]: _type = "Task" [ 649.550089] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.558627] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779831, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.574825] env[68244]: DEBUG nova.scheduler.client.report [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.805882] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 649.809333] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating directory with path [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.809333] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cab555a-1dba-410c-b7c0-76881e81df8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.830531] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 649.835272] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Created directory with path [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.836552] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Fetch image to [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 649.836552] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Downloading image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68244) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 649.837427] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6692b6-b6e2-4c6e-a68c-3de088828449 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.846991] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acea1a3d-a48d-47c6-a2ed-f0b073f01c16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.861883] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9f6ee1-eb46-4995-ae88-d2d89768effe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.906350] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.906441] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd 
tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.906603] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.906803] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.907082] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.907291] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.907828] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.907973] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.908175] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.908339] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.908565] env[68244]: DEBUG nova.virt.hardware [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.909419] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-384218ad-2289-4e50-9858-9a09ae15a09b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.912800] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350083bb-ff32-4396-8209-1eb25092c016 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.921109] env[68244]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7fbc98cc-b583-4d23-8cb3-245d09ae1873 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.931249] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c90f7e-d2e1-444c-85ed-4692a96980cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.951504] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Downloading image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to the data store datastore2 {{(pid=68244) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 650.019032] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.032896] env[68244]: DEBUG oslo_vmware.rw_handles [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 650.099634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.100161] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.102684] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.108767] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.947s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.110575] env[68244]: INFO nova.compute.claims [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.113608] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.116455] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.120826] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.121334] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.121580] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.121769] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.121931] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 650.123152] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.127602] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779831, 'name': CreateVM_Task, 'duration_secs': 0.334636} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.127866] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.129322] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.129586] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.129946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.130494] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5f3a434-39f9-4b17-ba5d-ed9531d7435f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.136358] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 650.136358] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524c914f-e222-baa2-ff1f-a41c3b57e798" [ 650.136358] env[68244]: _type = "Task" [ 650.136358] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.147277] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524c914f-e222-baa2-ff1f-a41c3b57e798, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.355041] env[68244]: DEBUG nova.network.neutron [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Updating instance_info_cache with network_info: [{"id": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "address": "fa:16:3e:b3:a6:14", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa34fb069-2c", "ovs_interfaceid": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.466727] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.467163] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.606913] env[68244]: DEBUG nova.compute.utils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.608588] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 650.632820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.656603] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.656603] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.656603] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.722854] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Received event network-changed-a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 650.722854] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Refreshing instance network info cache due to event network-changed-a9569b35-556f-4a24-9726-720578e04bf7. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 650.722854] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquiring lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.722854] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquired lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.722854] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Refreshing network info cache for port a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.770163] env[68244]: DEBUG oslo_vmware.rw_handles [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 650.770371] env[68244]: DEBUG oslo_vmware.rw_handles [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 650.858801] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.860295] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Instance network_info: |[{"id": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "address": "fa:16:3e:b3:a6:14", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa34fb069-2c", "ovs_interfaceid": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.860477] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:a6:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a34fb069-2c07-4bff-b9e7-6f4cad7240ff', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.876179] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Creating folder: Project (e6004a8c148a4dc3a3a370d043407f3e). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.877123] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Downloaded image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68244) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 650.880958] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 650.880958] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Copying Virtual Disk [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk to [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.880958] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25aa7523-01e1-4291-87a1-fbec5d125ec6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.884739] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9b03dad-d3f2-4fd0-b806-607e5f2c28d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.894699] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 650.894699] env[68244]: value = "task-2779833" [ 650.894699] env[68244]: _type = "Task" [ 650.894699] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.903547] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Created folder: Project (e6004a8c148a4dc3a3a370d043407f3e) in parent group-v558876. [ 650.903838] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Creating folder: Instances. Parent ref: group-v558883. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.904648] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ebe4ab1-2891-4cf9-9903-de8fa6dfdb5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.912072] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779833, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.922477] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Created folder: Instances in parent group-v558883. [ 650.922477] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.922477] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.922477] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d025ee9-3453-43f5-9de0-14dfac5a71f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.942960] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.942960] env[68244]: value = "task-2779835" [ 650.942960] env[68244]: _type = "Task" [ 650.942960] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.952671] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779835, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.971170] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.043355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "d81bdefa-9c23-413b-9670-bbb2139084f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.043891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.110415] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.264738] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Successfully updated port: 0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.339931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee2d9c2-3acb-4da6-a8a6-26d11e5ac037 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.348978] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0945b1e3-6e67-4e86-9cc6-5e5ca81278d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.389743] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7fb543-c01d-422c-b85d-4a4be7e06e23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.402885] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be87b49-e176-4ea9-90ce-a9eba5d65077 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.413765] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779833, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.423772] env[68244]: DEBUG nova.compute.provider_tree [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.452646] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779835, 'name': CreateVM_Task, 'duration_secs': 0.368745} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.452789] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 651.453658] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.453804] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.454085] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 651.454388] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b907569-567e-4b0d-a969-c796bb1af2ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.459719] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 651.459719] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527eb60e-1c14-a537-cfc0-c88e98061451" [ 651.459719] env[68244]: _type = "Task" [ 651.459719] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.469366] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527eb60e-1c14-a537-cfc0-c88e98061451, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.493559] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.777174] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.777174] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.777174] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.858072] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Updated VIF entry in instance network info cache for port a9569b35-556f-4a24-9726-720578e04bf7. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 651.858426] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Updating instance_info_cache with network_info: [{"id": "a9569b35-556f-4a24-9726-720578e04bf7", "address": "fa:16:3e:6f:d1:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9569b35-55", "ovs_interfaceid": "a9569b35-556f-4a24-9726-720578e04bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.908457] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779833, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.750947} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.908457] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Copied Virtual Disk [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk to [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.909123] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleting the datastore file [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.909426] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba40ba52-0ea0-44d9-9a46-fa78273b4b00 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.917244] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 651.917244] env[68244]: value = "task-2779836" [ 651.917244] env[68244]: _type = "Task" [ 651.917244] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.930240] env[68244]: DEBUG nova.scheduler.client.report [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.934303] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779836, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.970420] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.970815] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.971083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.124315] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.158627] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.158940] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.159043] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.159224] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 
tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.159370] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.160576] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.163237] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.163416] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.163585] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.163745] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.163912] env[68244]: DEBUG nova.virt.hardware [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.166181] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa3b75e-d3e0-4cd5-ba4d-affbc97eea8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.176131] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7907f9c-1502-4a8d-b9dc-04891611a3e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.196043] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Instance VIF info [] {{(pid=68244) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.202544] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Creating folder: Project (0f86e682d3094417a1e1083bb1006fdf). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.203581] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d68bc3e-b16a-4f9d-9bfa-388979be9022 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.215195] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Created folder: Project (0f86e682d3094417a1e1083bb1006fdf) in parent group-v558876. [ 652.215392] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Creating folder: Instances. Parent ref: group-v558886. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.215755] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0eea883-2908-4cb3-ae09-2410fd1a8746 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.226535] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Created folder: Instances in parent group-v558886. [ 652.226535] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.226535] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.226535] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-813f313b-d463-46d7-b50f-2edb143bbe69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.245831] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.245831] env[68244]: value = "task-2779839" [ 652.245831] env[68244]: _type = "Task" [ 652.245831] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.255091] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779839, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.294523] env[68244]: DEBUG nova.compute.manager [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Received event network-vif-plugged-0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 652.294738] env[68244]: DEBUG oslo_concurrency.lockutils [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] Acquiring lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.294962] env[68244]: DEBUG oslo_concurrency.lockutils [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.296214] env[68244]: DEBUG oslo_concurrency.lockutils [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.296214] env[68244]: DEBUG nova.compute.manager [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] No waiting events found dispatching network-vif-plugged-0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.296435] env[68244]: WARNING nova.compute.manager [req-199b266d-43ff-4876-a29e-979db948dbe0 req-0e89d971-5110-4a28-b31b-db3bd8931913 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Received unexpected event network-vif-plugged-0d74c09f-0ee9-498b-a744-56d26babef9c for instance with vm_state building and task_state spawning. 
[ 652.363059] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Releasing lock "refresh_cache-f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.363059] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Received event network-vif-plugged-8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 652.363225] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquiring lock "47330950-506d-41c7-b564-30f46a7025a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.363392] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Lock "47330950-506d-41c7-b564-30f46a7025a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.364271] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Lock "47330950-506d-41c7-b564-30f46a7025a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.364271] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] No waiting events found dispatching network-vif-plugged-8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.364271] env[68244]: WARNING nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Received unexpected event network-vif-plugged-8289ebb7-2b25-4ad3-bda3-0609581f96be for instance with vm_state building and task_state spawning. [ 652.364271] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Received event network-changed-8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 652.364518] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Refreshing instance network info cache due to event network-changed-8289ebb7-2b25-4ad3-bda3-0609581f96be. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 652.364638] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquiring lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.364848] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquired lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.364952] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Refreshing network info cache for port 8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.385360] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.427840] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02476} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.428429] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.428677] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Moving file from [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351/9aa0b4d1-af1b-4141-9ca6-95525b722d7e to [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e. 
{{(pid=68244) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 652.428949] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-d3f1f6ba-1851-4116-bc74-78b0cffa0c78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.438195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.438195] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.447165] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.967s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.447549] env[68244]: INFO nova.compute.claims [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.450892] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 652.450892] env[68244]: value = "task-2779840" [ 652.450892] env[68244]: _type = "Task" [ 652.450892] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.462799] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779840, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.755023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.755302] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.764402] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779839, 'name': CreateVM_Task, 'duration_secs': 0.301982} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.764644] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 652.765183] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.765374] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.765770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 652.766041] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb190c1-f3e6-4f93-9b34-1acf6638b007 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.771819] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 652.771819] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256a620-8c97-8881-db0c-46eb3cb8cbda" [ 652.771819] env[68244]: _type = "Task" [ 652.771819] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.783125] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256a620-8c97-8881-db0c-46eb3cb8cbda, 'name': SearchDatastore_Task, 'duration_secs': 0.00864} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.783381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.783631] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 652.783854] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.811547] env[68244]: DEBUG nova.network.neutron [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Updating instance_info_cache with network_info: [{"id": "0d74c09f-0ee9-498b-a744-56d26babef9c", "address": "fa:16:3e:ef:c3:d5", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d74c09f-0e", "ovs_interfaceid": "0d74c09f-0ee9-498b-a744-56d26babef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.955835] env[68244]: DEBUG nova.compute.utils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 
tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 652.964648] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Not allocating networking since 'none' was specified. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 652.979994] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779840, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027231} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.980288] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] File moved {{(pid=68244) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 652.980483] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Cleaning up location [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 652.980644] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleting the datastore file [datastore2] vmware_temp/47f84401-fa21-4a12-9edf-58ecd7646351 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.980902] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1003ad2d-a897-4e54-8400-7ae449c9ce16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.990763] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 652.990763] env[68244]: value = "task-2779841" [ 652.990763] env[68244]: _type = "Task" [ 652.990763] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.002894] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.193871] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Updated VIF entry in instance network info cache for port 8289ebb7-2b25-4ad3-bda3-0609581f96be. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.194374] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Updating instance_info_cache with network_info: [{"id": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "address": "fa:16:3e:a5:9c:45", "network": {"id": "5d647318-d591-470e-b599-6703584b707a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1158812997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944fef8d71194248967d332728d31002", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8289ebb7-2b", "ovs_interfaceid": "8289ebb7-2b25-4ad3-bda3-0609581f96be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.317278] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.317278] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Instance network_info: |[{"id": "0d74c09f-0ee9-498b-a744-56d26babef9c", "address": "fa:16:3e:ef:c3:d5", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d74c09f-0e", "ovs_interfaceid": "0d74c09f-0ee9-498b-a744-56d26babef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 653.317455] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:c3:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d74c09f-0ee9-498b-a744-56d26babef9c', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.324864] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Creating folder: Project (4e80cfa81cd442f9af3bf027b9059123). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.325192] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81a42655-8add-4a3a-b7ed-905267c71b9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.339756] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Created folder: Project (4e80cfa81cd442f9af3bf027b9059123) in parent group-v558876. [ 653.340035] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Creating folder: Instances. Parent ref: group-v558889. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 653.340313] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4dc39221-2a02-4be1-8e8d-b98f57c2276e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.350735] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Created folder: Instances in parent group-v558889. [ 653.351268] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.351396] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.351617] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64e5ef28-5ebc-4851-8f20-9adfcf7cbc9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.370989] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.370989] env[68244]: value = "task-2779844" [ 653.370989] env[68244]: _type = "Task" [ 653.370989] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.379742] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779844, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.468075] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.507721] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025596} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.508025] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 653.510816] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46cbff88-3561-4e68-9e57-d6778a5c9ae0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.514423] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 653.514423] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b79a9b-e608-0626-3d93-4a2594e2a166" [ 653.514423] env[68244]: _type = "Task" [ 653.514423] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.524422] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b79a9b-e608-0626-3d93-4a2594e2a166, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.697537] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Releasing lock "refresh_cache-47330950-506d-41c7-b564-30f46a7025a7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.697842] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Received event network-vif-plugged-a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 653.699520] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquiring lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.699520] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.699520] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.699520] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] No waiting events found dispatching network-vif-plugged-a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.699520] env[68244]: WARNING nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Received unexpected event network-vif-plugged-a34fb069-2c07-4bff-b9e7-6f4cad7240ff for instance with vm_state building and task_state spawning. [ 653.699745] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Received event network-changed-a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 653.699745] env[68244]: DEBUG nova.compute.manager [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Refreshing instance network info cache due to event network-changed-a34fb069-2c07-4bff-b9e7-6f4cad7240ff. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 653.699745] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquiring lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.699745] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Acquired lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.699745] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Refreshing network info cache for port a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.795315] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acbda67-176e-4e0c-bc85-a4c80baf32ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.807292] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd27468-9997-4b7a-9d3a-8e8c07bc078c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.844269] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b55cbde-fa11-4716-b2a2-d14c14f091cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.852765] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d5c73b-aca2-42ae-b392-f3e95420fab6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.867788] env[68244]: DEBUG nova.compute.provider_tree [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.882272] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779844, 'name': CreateVM_Task, 'duration_secs': 0.364321} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.883443] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.883749] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.884311] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.884384] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 653.885044] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b76fffa0-3a91-4e76-b781-6289989cdcde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.890030] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 653.890030] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235e630-239b-97da-29bc-1b7390c6f2c3" [ 653.890030] env[68244]: _type = "Task" [ 653.890030] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.899499] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235e630-239b-97da-29bc-1b7390c6f2c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.028789] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b79a9b-e608-0626-3d93-4a2594e2a166, 'name': SearchDatastore_Task, 'duration_secs': 0.008786} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.029272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.029386] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f48156b9-0316-4a9c-9cf0-9dd9d7a932c1/f48156b9-0316-4a9c-9cf0-9dd9d7a932c1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.029725] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.029876] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.030397] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57308451-b7c1-4880-9b6d-f98f06a39fc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.032450] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-564ec789-875f-4d94-aab0-0a8a6067fa7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.039921] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 654.039921] env[68244]: value = "task-2779845" [ 654.039921] env[68244]: _type = "Task" [ 654.039921] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.045286] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.045286] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.045599] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-600cc959-ee75-4150-a4d6-66dc9e5c61cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.053957] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779845, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.058553] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 654.058553] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529456a3-4f1f-6197-7af9-bdf9869a3060" [ 654.058553] env[68244]: _type = "Task" [ 654.058553] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.065537] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529456a3-4f1f-6197-7af9-bdf9869a3060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.372754] env[68244]: DEBUG nova.scheduler.client.report [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.408641] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235e630-239b-97da-29bc-1b7390c6f2c3, 'name': SearchDatastore_Task, 'duration_secs': 0.00971} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.410373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.410604] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.410812] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.414781] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "3776b39a-d10b-4068-8b4b-5dc25798e088" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.415090] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.475894] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.506542] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 654.506827] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.507037] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 654.507267] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.507453] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 654.507658] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 654.507876] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 654.508084] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 654.508284] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 
tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 654.508489] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 654.508714] env[68244]: DEBUG nova.virt.hardware [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 654.509627] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80efc55e-62f6-4a9e-b633-b6e148713867 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.517921] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a763f28d-07cc-47c2-afa6-de4da7d3a70e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.535019] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 654.541313] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Creating folder: Project (81de28b0c664476691122681387abe9d). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.541922] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd185350-8a56-40aa-8113-992efe69a80a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.553821] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779845, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449806} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.554086] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f48156b9-0316-4a9c-9cf0-9dd9d7a932c1/f48156b9-0316-4a9c-9cf0-9dd9d7a932c1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 654.554354] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.554637] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58603bc9-e5bd-4cb4-81bb-c45f3e0db4d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.557703] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Created folder: Project (81de28b0c664476691122681387abe9d) in parent group-v558876. [ 654.558183] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Creating folder: Instances. Parent ref: group-v558892. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.563422] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49d8bc12-d957-484d-84c4-17c76aaa1b8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.567299] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 654.567299] env[68244]: value = "task-2779847" [ 654.567299] env[68244]: _type = "Task" [ 654.567299] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.578110] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529456a3-4f1f-6197-7af9-bdf9869a3060, 'name': SearchDatastore_Task, 'duration_secs': 0.010593} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.582080] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779847, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.582646] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8627be42-4d92-4447-878d-10efd94396ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.590100] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 654.590100] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522b7ba6-4ffa-acb0-4fd1-b0e85938e1e2" [ 654.590100] env[68244]: _type = "Task" [ 654.590100] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.591762] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Created folder: Instances in parent group-v558892. [ 654.592171] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 654.596848] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 654.597127] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f97dd70f-0696-49df-b1a2-9e50ac2a999e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.616860] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522b7ba6-4ffa-acb0-4fd1-b0e85938e1e2, 'name': SearchDatastore_Task, 'duration_secs': 0.00978} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.618490] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.619020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 47330950-506d-41c7-b564-30f46a7025a7/47330950-506d-41c7-b564-30f46a7025a7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.619286] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 654.619286] env[68244]: value = "task-2779849" [ 654.619286] env[68244]: _type = "Task" [ 654.619286] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.619511] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.619703] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.619910] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c3c587c-05e0-42a8-bfad-24313f2b465c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.622081] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa1e12e2-ebde-4fc1-8be4-915503e93ceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.635986] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779849, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.638398] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.638483] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.643432] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 654.643432] env[68244]: value = "task-2779850" [ 654.643432] env[68244]: _type = "Task" [ 654.643432] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.643432] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-216cb82d-24e1-4b44-8a37-1a045548eb9a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.656645] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 654.656645] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bd8cd2-59bc-8182-d790-613f619a4a5e" [ 654.656645] env[68244]: _type = "Task" [ 654.656645] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.656645] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.663744] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bd8cd2-59bc-8182-d790-613f619a4a5e, 'name': SearchDatastore_Task, 'duration_secs': 0.008841} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.664667] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eda835e-2e10-4505-a938-d91eb4d488b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.671571] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 654.671571] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d92655-578b-758a-0bef-1a3bc13746d9" [ 654.671571] env[68244]: _type = "Task" [ 654.671571] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.679843] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d92655-578b-758a-0bef-1a3bc13746d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.723724] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Updated VIF entry in instance network info cache for port a34fb069-2c07-4bff-b9e7-6f4cad7240ff. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 654.724522] env[68244]: DEBUG nova.network.neutron [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Updating instance_info_cache with network_info: [{"id": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "address": "fa:16:3e:b3:a6:14", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa34fb069-2c", "ovs_interfaceid": "a34fb069-2c07-4bff-b9e7-6f4cad7240ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.881848] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=68244) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.881848] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 654.884200] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.765s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.885863] env[68244]: INFO nova.compute.claims [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.082046] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779847, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065128} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.087276] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.089012] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b082c5-8f18-43a0-978d-a10fb1f4dc38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.120986] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] f48156b9-0316-4a9c-9cf0-9dd9d7a932c1/f48156b9-0316-4a9c-9cf0-9dd9d7a932c1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.121319] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a6e05ad-64ec-4c74-b8af-ea9c8d7ee5bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.145444] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779849, 'name': CreateVM_Task, 'duration_secs': 0.436724} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.149539] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 655.149981] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 655.149981] env[68244]: value = "task-2779851" [ 655.149981] env[68244]: _type = "Task" [ 655.149981] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.150473] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.150648] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.150970] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 655.151585] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a335366b-4715-4a2c-b841-f58e0f26ece5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.159392] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459619} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.160261] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 47330950-506d-41c7-b564-30f46a7025a7/47330950-506d-41c7-b564-30f46a7025a7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.160502] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.161150] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-389e282b-997e-41e6-ab2f-4141849ca0e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.167318] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779851, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.167689] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 655.167689] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528ecea2-d081-e135-315f-c216ebee781b" [ 655.167689] env[68244]: _type = "Task" [ 655.167689] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.173522] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 655.173522] env[68244]: value = "task-2779852" [ 655.173522] env[68244]: _type = "Task" [ 655.173522] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.186361] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528ecea2-d081-e135-315f-c216ebee781b, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.187578] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.187742] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 655.187872] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.192052] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d92655-578b-758a-0bef-1a3bc13746d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009165} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.195156] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.195765] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3a4e045e-8e27-45e4-9c90-8aa16298a096/3a4e045e-8e27-45e4-9c90-8aa16298a096.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.195957] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779852, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.196202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.196343] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.196559] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83475e43-5cf9-4a03-ae61-70c30568aa19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.198972] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c07ce68c-7fb9-43e9-9463-07d8b2db5c8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.206958] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 655.206958] env[68244]: value = "task-2779853" [ 655.206958] env[68244]: _type = "Task" [ 655.206958] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.211197] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.211378] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.212649] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6bae4bf-8c26-44df-b4c1-c2a488cb5231 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.219517] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779853, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.222521] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 655.222521] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0c66f-a181-932a-710f-8febcc183d39" [ 655.222521] env[68244]: _type = "Task" [ 655.222521] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.227630] env[68244]: DEBUG oslo_concurrency.lockutils [req-103e56cc-a33c-4d15-a261-879820e36506 req-9ddd4b85-90dc-46c2-9e67-19da86dfc16c service nova] Releasing lock "refresh_cache-3a4e045e-8e27-45e4-9c90-8aa16298a096" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.231013] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0c66f-a181-932a-710f-8febcc183d39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.348737] env[68244]: DEBUG nova.compute.manager [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Received event network-changed-0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 655.348737] env[68244]: DEBUG nova.compute.manager [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Refreshing instance network info cache due to event network-changed-0d74c09f-0ee9-498b-a744-56d26babef9c. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 655.348737] env[68244]: DEBUG oslo_concurrency.lockutils [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] Acquiring lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.348899] env[68244]: DEBUG oslo_concurrency.lockutils [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] Acquired lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.348899] env[68244]: DEBUG nova.network.neutron [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Refreshing network info cache for port 0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.391209] env[68244]: DEBUG nova.compute.utils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 655.395331] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 655.395450] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.543248] env[68244]: DEBUG nova.policy [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7397bc613018446db09235839b732ce1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '561ba494fc6a4870895269ee5c203dd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 655.663560] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779851, 'name': ReconfigVM_Task, 'duration_secs': 0.336862} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.663867] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Reconfigured VM instance instance-00000001 to attach disk [datastore2] f48156b9-0316-4a9c-9cf0-9dd9d7a932c1/f48156b9-0316-4a9c-9cf0-9dd9d7a932c1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.664607] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f38d54f3-7fe7-421c-b88f-bf7278abff45 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.670164] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 655.670164] env[68244]: value = "task-2779854" [ 655.670164] env[68244]: _type = "Task" [ 655.670164] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.678860] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779854, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.692225] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076574} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.693377] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.697893] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cd595a-b935-45f4-9f3e-db18952a662e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.722079] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 47330950-506d-41c7-b564-30f46a7025a7/47330950-506d-41c7-b564-30f46a7025a7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.726624] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96e7470f-1b6c-4900-b6c3-679f08ad40cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.753017] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779853, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461909} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.758852] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3a4e045e-8e27-45e4-9c90-8aa16298a096/3a4e045e-8e27-45e4-9c90-8aa16298a096.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.759017] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.759419] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 655.759419] env[68244]: value = "task-2779855" [ 655.759419] env[68244]: _type = "Task" [ 655.759419] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.759419] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0c66f-a181-932a-710f-8febcc183d39, 'name': SearchDatastore_Task, 'duration_secs': 0.01816} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.759750] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c4371b2-bffe-4ebc-9ad8-cc3989f7f771 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.765682] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b22e710-8e71-401a-b0fe-7f6fc49a19b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.773818] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 655.773818] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bde9da-9acc-3655-2cdf-3c45a26d2d10" [ 655.773818] env[68244]: _type = "Task" [ 655.773818] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.786622] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 655.786622] env[68244]: value = "task-2779856" [ 655.786622] env[68244]: _type = "Task" [ 655.786622] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.786918] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779855, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.800036] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bde9da-9acc-3655-2cdf-3c45a26d2d10, 'name': SearchDatastore_Task, 'duration_secs': 0.010406} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.802954] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.803265] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207/ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.803537] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779856, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.803760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.803940] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.804221] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b20f058-81ad-44d9-ab75-ad6c90e2a4a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.806294] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc27be92-b021-407e-80f7-432adfb2a15a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.812901] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 655.812901] env[68244]: value = "task-2779857" [ 655.812901] env[68244]: _type = "Task" [ 655.812901] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.816970] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.817752] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.819195] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-261244b9-47a0-4f74-99bf-a5a5513656c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.824785] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.828577] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 655.828577] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2cffc-ab97-dc93-a3a5-a2165da502e8" [ 655.828577] env[68244]: _type = "Task" [ 655.828577] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.836403] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2cffc-ab97-dc93-a3a5-a2165da502e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.899315] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.080533] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Successfully created port: f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.185713] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779854, 'name': Rename_Task, 'duration_secs': 0.158997} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.186084] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 656.186317] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05973266-85de-4d18-8ced-fe631ae58420 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.191038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798a4649-c6f6-40bd-afba-8f587fab8899 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.195503] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 656.195503] env[68244]: value = "task-2779858" [ 656.195503] env[68244]: _type = "Task" [ 656.195503] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.206822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7341fa70-0c0d-4d81-98cf-c25fcee04207 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.215437] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779858, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.251060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab72a760-e701-46cf-8400-457d7e316b19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.258960] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da9a3ec-3c44-47da-bbb3-199cadbdf30b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.279547] env[68244]: DEBUG nova.compute.provider_tree [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 656.284881] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779855, 'name': ReconfigVM_Task, 'duration_secs': 0.405204} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.284999] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 47330950-506d-41c7-b564-30f46a7025a7/47330950-506d-41c7-b564-30f46a7025a7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.285639] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8cf0a57d-08c4-49ce-9c8f-9d504e237e01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.291276] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 656.291276] env[68244]: value = "task-2779859" [ 656.291276] env[68244]: _type = "Task" [ 656.291276] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.302825] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077141} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.303413] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.304181] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee50e634-0801-45e0-a9b4-7ac436e8120d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.310496] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779859, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.330638] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 3a4e045e-8e27-45e4-9c90-8aa16298a096/3a4e045e-8e27-45e4-9c90-8aa16298a096.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.336420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4d2617a-18ae-48ff-a7aa-0a83e5d48631 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.360019] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460584} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.364487] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207/ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.364735] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.365043] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2cffc-ab97-dc93-a3a5-a2165da502e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009706} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.365321] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 656.365321] env[68244]: value = "task-2779860" [ 656.365321] env[68244]: _type = "Task" [ 656.365321] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.365465] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c321b616-d9e4-43db-9122-5746f47731ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.367953] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05b1acb9-210b-4d17-a1ef-e417c2f60bc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.377809] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 656.377809] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524055a7-5b45-c269-e695-5aa0fa35ee51" [ 656.377809] env[68244]: _type = "Task" [ 656.377809] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.381098] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779860, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.385174] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 656.385174] env[68244]: value = "task-2779861" [ 656.385174] env[68244]: _type = "Task" [ 656.385174] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.393075] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524055a7-5b45-c269-e695-5aa0fa35ee51, 'name': SearchDatastore_Task, 'duration_secs': 0.008688} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.393593] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.393700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 57504eac-0d7f-4fbe-b08c-6864713cca94/57504eac-0d7f-4fbe-b08c-6864713cca94.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 656.393950] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.394216] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.394454] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0db720c0-cab3-411b-9d56-57bbbe744de1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.399158] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a4b33e0-acd4-443c-b228-e791800645b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.400925] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 
tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.405193] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 656.405193] env[68244]: value = "task-2779862" [ 656.405193] env[68244]: _type = "Task" [ 656.405193] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.412645] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.413239] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.414028] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b3f044-8e08-44e6-ba84-a78555936ed3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.419974] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.422073] env[68244]: DEBUG nova.network.neutron [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Updated VIF entry in instance network info cache for port 0d74c09f-0ee9-498b-a744-56d26babef9c. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 656.422308] env[68244]: DEBUG nova.network.neutron [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Updating instance_info_cache with network_info: [{"id": "0d74c09f-0ee9-498b-a744-56d26babef9c", "address": "fa:16:3e:ef:c3:d5", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d74c09f-0e", "ovs_interfaceid": "0d74c09f-0ee9-498b-a744-56d26babef9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.425090] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 656.425090] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5205812f-e723-559e-51c7-886e6ab5f6d5" [ 656.425090] env[68244]: _type = "Task" [ 656.425090] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.432999] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5205812f-e723-559e-51c7-886e6ab5f6d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.714384] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779858, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.812770] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779859, 'name': Rename_Task, 'duration_secs': 0.154196} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.813151] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 656.813963] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d78a3b49-e885-4069-b469-c6d2ebf66efe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.818883] env[68244]: ERROR nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [req-09dd267e-5ca9-4916-abca-2faaf9419527] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-09dd267e-5ca9-4916-abca-2faaf9419527"}]} [ 656.831255] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 656.831255] env[68244]: value = "task-2779863" [ 656.831255] env[68244]: _type = "Task" [ 656.831255] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.845952] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779863, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.848323] env[68244]: DEBUG nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 656.879800] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779860, 'name': ReconfigVM_Task, 'duration_secs': 0.503357} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.880380] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 3a4e045e-8e27-45e4-9c90-8aa16298a096/3a4e045e-8e27-45e4-9c90-8aa16298a096.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.882862] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6578314c-5f92-4bc6-889d-d563b62a0f22 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.884920] env[68244]: DEBUG nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 656.885306] env[68244]: DEBUG nova.compute.provider_tree [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 656.899196] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 656.899196] env[68244]: value = "task-2779864" [ 656.899196] env[68244]: _type = "Task" [ 656.899196] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.909027] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105517} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.910222] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.911789] env[68244]: DEBUG nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 656.917813] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe2c37b-dc9e-4199-9c51-a87a03e42b50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.925071] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 656.927271] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779864, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.928430] env[68244]: DEBUG oslo_concurrency.lockutils [req-d0fef762-87f0-4323-ba34-84d213fb461e req-779c4286-0210-4d0f-8654-313f57cd4c59 service nova] Releasing lock "refresh_cache-57504eac-0d7f-4fbe-b08c-6864713cca94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.944800] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481072} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.954318] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207/ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.957244] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 57504eac-0d7f-4fbe-b08c-6864713cca94/57504eac-0d7f-4fbe-b08c-6864713cca94.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.957745] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.957959] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f65c7516-dcc7-403d-81bb-9d445915924c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.976177] env[68244]: DEBUG nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 656.983427] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c325d13-ead3-4e26-acf9-29705456b648 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.991586] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5205812f-e723-559e-51c7-886e6ab5f6d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.991586] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 656.991586] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.991737] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 656.991737] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.991737] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 656.991737] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 656.991737] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 656.991884] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 656.991884] env[68244]: DEBUG nova.virt.hardware [None 
req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 656.991884] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 656.991884] env[68244]: DEBUG nova.virt.hardware [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 656.992104] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e23fc55-9e29-4cbe-8ee8-83131b1b1451 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.996724] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-929f1ed5-7f51-4a4c-9e50-3a3e3d6e0f13 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.999515] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 656.999515] env[68244]: value = "task-2779865" [ 656.999515] env[68244]: _type = "Task" [ 656.999515] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.001547] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 657.001547] env[68244]: value = "task-2779866" [ 657.001547] env[68244]: _type = "Task" [ 657.001547] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.012026] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 657.012026] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7df21-aa22-6971-c02f-c37095c4ca22" [ 657.012026] env[68244]: _type = "Task" [ 657.012026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.014281] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b519beb-50df-4421-a1eb-36ccc0a283cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.028926] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779865, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.033190] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.048177] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7df21-aa22-6971-c02f-c37095c4ca22, 'name': SearchDatastore_Task, 'duration_secs': 0.010517} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.051246] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.051547] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.052216] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb9e4851-753b-4a97-8632-e852a7718619 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.058548] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 657.058548] env[68244]: value = "task-2779867" [ 657.058548] env[68244]: _type = "Task" [ 657.058548] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.066624] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.213151] env[68244]: DEBUG oslo_vmware.api [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779858, 'name': PowerOnVM_Task, 'duration_secs': 0.59846} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.213429] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 657.213713] env[68244]: INFO nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Took 14.33 seconds to spawn the instance on the hypervisor. [ 657.214123] env[68244]: DEBUG nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 657.216319] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb1cbbf-a159-4c17-80ad-225e30bdf723 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.279480] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe755a4-c1bb-4d01-96e1-03c88766c6e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.297282] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c48b2e8-d0aa-4cd5-bdad-0f6e854c6c20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.336239] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd185328-465b-4992-99fa-540c60e4cc33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.344420] env[68244]: DEBUG oslo_vmware.api [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779863, 'name': PowerOnVM_Task, 'duration_secs': 0.504337} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.346855] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 657.348177] env[68244]: INFO nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Took 12.19 seconds to spawn the instance on the hypervisor. 
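The recurring "Waiting for the task: (returnval){ ... }" and "_poll_task ... progress is N%" entries in this spawn sequence (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) come from oslo.vmware's submit-and-poll helper. The following is only a minimal sketch of that pattern using oslo.vmware's public VMwareAPISession API, not Nova's exact code path; the endpoint, credentials, and managed-object ID are placeholders and do not come from this log:

    from oslo_vmware import api, vim_util

    # Placeholder vCenter endpoint and credentials (assumption, not from this log).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed object reference for the VM to power on.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the asynchronous vCenter task ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... then poll it until it finishes; each poll corresponds to one
    # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%" DEBUG line,
    # and wait_for_task() returns the final task info on success.
    task_info = session.wait_for_task(task)

The 'duration_secs' values reported on "completed successfully" lines are measured by this same polling loop once the task reaches its terminal state.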
[ 657.348177] env[68244]: DEBUG nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 657.349444] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286ad7f6-2171-4a51-94ad-57ab82dc7571 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.353118] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d693205e-473d-4e57-a6bf-fe746ff3a593 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.371912] env[68244]: DEBUG nova.compute.provider_tree [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 657.409226] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779864, 'name': Rename_Task, 'duration_secs': 0.311528} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.409587] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.410300] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59edf8f0-08f4-494f-b89e-2ff1ef30ec9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.417915] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 657.417915] env[68244]: value = "task-2779868" [ 657.417915] env[68244]: _type = "Task" [ 657.417915] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.428900] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779868, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.520462] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779865, 'name': ReconfigVM_Task, 'duration_secs': 0.367824} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.524756] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Reconfigured VM instance instance-00000005 to attach disk [datastore2] ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207/ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.526215] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0725} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.527289] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f57b28e4-96a7-4152-b13c-a7cec8183c77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.530447] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.531437] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f5a8f-d79a-4258-8c73-9695b02de562 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.562828] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 57504eac-0d7f-4fbe-b08c-6864713cca94/57504eac-0d7f-4fbe-b08c-6864713cca94.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.565317] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43543bb1-d672-46e0-a477-f2861cc90981 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.583092] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 657.583092] env[68244]: value = "task-2779869" [ 657.583092] env[68244]: _type = "Task" [ 657.583092] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.592068] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 657.592068] env[68244]: value = "task-2779870" [ 657.592068] env[68244]: _type = "Task" [ 657.592068] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.595541] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499161} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.602514] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 657.602749] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 657.603150] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779869, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.603296] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0e60ec8-75bf-4d73-aa00-ce153694d4e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.611582] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779870, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.613604] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 657.613604] env[68244]: value = "task-2779871" [ 657.613604] env[68244]: _type = "Task" [ 657.613604] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.622896] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.745795] env[68244]: INFO nova.compute.manager [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Took 19.09 seconds to build instance. [ 657.893340] env[68244]: INFO nova.compute.manager [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Took 17.02 seconds to build instance. [ 657.929071] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779868, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.930323] env[68244]: DEBUG nova.scheduler.client.report [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 657.931156] env[68244]: DEBUG nova.compute.provider_tree [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 17 to 18 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 657.931491] env[68244]: DEBUG nova.compute.provider_tree [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 658.100971] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] 
Task: {'id': task-2779869, 'name': Rename_Task, 'duration_secs': 0.253992} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.105234] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.105423] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2db4dbfc-c2be-4465-86bf-f55492f54892 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.113103] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779870, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.114803] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 658.114803] env[68244]: value = "task-2779872" [ 658.114803] env[68244]: _type = "Task" [ 658.114803] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.126068] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161602} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.129259] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 658.129566] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779872, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.130376] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946e8fe8-fe7e-49ce-a6ae-7c41ee02fdcb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.153028] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 658.153028] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1aaa6026-6bd8-4c9e-9a03-646ea46c879e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.172690] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 658.172690] env[68244]: value = "task-2779873" [ 658.172690] env[68244]: _type = "Task" [ 658.172690] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.181949] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.247303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-af877281-5ffd-4674-b38a-8de8f963e824 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.605s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.395921] env[68244]: DEBUG oslo_concurrency.lockutils [None req-548a30ef-a1c2-4d53-ba44-c7a64f77355a tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.535s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.438900] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779868, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.439936] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.556s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.440986] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.444737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.262s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.449276] env[68244]: INFO nova.compute.claims [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.570709] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Successfully updated port: f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.611565] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779870, 'name': ReconfigVM_Task, 'duration_secs': 0.564087} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.611565] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 57504eac-0d7f-4fbe-b08c-6864713cca94/57504eac-0d7f-4fbe-b08c-6864713cca94.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.611919] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4c8e98e-6aa4-41fb-8c7d-30f7099c8f56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.619169] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 658.619169] env[68244]: value = "task-2779874" [ 658.619169] env[68244]: _type = "Task" [ 658.619169] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.641762] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779872, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.641762] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779874, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.692029] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779873, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.753598] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.892941] env[68244]: DEBUG nova.compute.manager [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Received event network-vif-plugged-f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 658.893171] env[68244]: DEBUG oslo_concurrency.lockutils [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] Acquiring lock "aebd1200-ae52-4537-a677-24b57b581517-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.893377] env[68244]: DEBUG oslo_concurrency.lockutils [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] Lock "aebd1200-ae52-4537-a677-24b57b581517-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.893538] env[68244]: DEBUG oslo_concurrency.lockutils [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] Lock "aebd1200-ae52-4537-a677-24b57b581517-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.893745] env[68244]: DEBUG nova.compute.manager [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] No waiting events found dispatching network-vif-plugged-f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 658.894421] env[68244]: WARNING nova.compute.manager [req-fc45d3b9-204f-45e8-bfb8-a127f6355271 req-3f56ed64-10f9-4137-aef4-48e8028f6a90 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Received unexpected event network-vif-plugged-f1165d65-1146-43b0-8b0b-413cebb150aa for instance with vm_state building and task_state spawning. [ 658.899609] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.931887] env[68244]: DEBUG oslo_vmware.api [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2779868, 'name': PowerOnVM_Task, 'duration_secs': 1.111919} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.932749] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.932749] env[68244]: INFO nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Took 11.41 seconds to spawn the instance on the hypervisor. [ 658.932749] env[68244]: DEBUG nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.933605] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222f771a-c75f-4500-9c15-5de76aab575e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.951497] env[68244]: DEBUG nova.compute.utils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.954534] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.954694] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.073642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.073765] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquired lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.073916] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.136307] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779874, 'name': Rename_Task, 'duration_secs': 0.243453} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.139361] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 659.139614] env[68244]: DEBUG oslo_vmware.api [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779872, 'name': PowerOnVM_Task, 'duration_secs': 0.715581} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.139885] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3df9edfc-ad73-41bc-8d46-27b522ed5953 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.141548] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.141820] env[68244]: INFO nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Took 7.02 seconds to spawn the instance on the hypervisor. [ 659.142505] env[68244]: DEBUG nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.144141] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ebd9d7-91eb-418b-b49f-f12a78451f74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.157731] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 659.157731] env[68244]: value = "task-2779875" [ 659.157731] env[68244]: _type = "Task" [ 659.157731] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.170195] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.171404] env[68244]: DEBUG nova.policy [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cce564ad53b4488da4da70bfbdb62fd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db652bbe2171493cbdd04d3139682698', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.184391] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779873, 'name': ReconfigVM_Task, 'duration_secs': 0.579801} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.184807] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfigured VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 659.185141] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f38305ed-39f1-4b29-94d5-fb7052787b90 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.191970] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 659.191970] env[68244]: value = "task-2779876" [ 659.191970] env[68244]: _type = "Task" [ 659.191970] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.202686] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779876, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.282358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.429908] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.461017] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.464152] env[68244]: INFO nova.compute.manager [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Took 17.38 seconds to build instance. [ 659.670711] env[68244]: INFO nova.compute.manager [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Took 16.70 seconds to build instance. 
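Editor's note: the inventory dicts logged for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 translate into schedulable capacity in Placement as (total - reserved) * allocation_ratio per resource class. A quick check of the numbers reported above (illustrative script, not Nova code):

    # Inventory exactly as reported in the provider_tree / report client lines.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's effective capacity for a resource class.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0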
[ 659.683042] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779875, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.688380] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.704241] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779876, 'name': Rename_Task, 'duration_secs': 0.240958} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.707670] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 659.708230] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33405e61-ceb9-4837-b8bd-fa531f1f464f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.715365] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 659.715365] env[68244]: value = "task-2779877" [ 659.715365] env[68244]: _type = "Task" [ 659.715365] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.729013] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779877, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.730814] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7071e81-2efa-444c-bf21-f8f8165178c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.744088] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e27172-0e73-43a6-a7f4-1a77e7a76c8b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.792843] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2edd96e-070e-4b92-b3a5-a72c0858ffd2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.802785] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb74582-c303-4adb-9e48-419030938230 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.819373] env[68244]: DEBUG nova.compute.provider_tree [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.969924] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a4e6899d-caf6-4eaa-a2fb-d94a434f49cc tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.893s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.161372] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Successfully created port: 455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.177988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d08efac5-ec23-40be-81c7-24ad9eadb1b7 tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.232s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.190345] env[68244]: DEBUG oslo_vmware.api [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779875, 'name': PowerOnVM_Task, 'duration_secs': 0.653752} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.190898] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 660.190898] env[68244]: INFO nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Took 10.36 seconds to spawn the instance on the hypervisor. [ 660.191085] env[68244]: DEBUG nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 660.192445] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c56f624-9248-4d30-872a-ba4383995728 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.226406] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779877, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.322501] env[68244]: DEBUG nova.scheduler.client.report [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.480362] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.485857] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.490011] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "47330950-506d-41c7-b564-30f46a7025a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.490275] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.490478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "47330950-506d-41c7-b564-30f46a7025a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.490657] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.490820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.495787] env[68244]: DEBUG nova.network.neutron [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Updating instance_info_cache with network_info: [{"id": "f1165d65-1146-43b0-8b0b-413cebb150aa", "address": "fa:16:3e:3d:c2:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapf1165d65-11", "ovs_interfaceid": "f1165d65-1146-43b0-8b0b-413cebb150aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.502560] env[68244]: INFO nova.compute.manager [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Terminating instance [ 660.524930] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.525266] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.525452] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.525673] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.526271] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.526271] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.526385] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.526672] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.526749] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.526922] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.527153] env[68244]: DEBUG nova.virt.hardware [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.528198] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a26007-a359-4d68-af7c-e4e83352b5c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.539434] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d09a48b-7c99-456e-974f-a559153dcda9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.732415] env[68244]: DEBUG oslo_vmware.api [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779877, 'name': PowerOnVM_Task, 'duration_secs': 0.725729} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.732415] env[68244]: INFO nova.compute.manager [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Took 18.29 seconds to build instance. [ 660.733380] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 660.733724] env[68244]: INFO nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Took 6.26 seconds to spawn the instance on the hypervisor. 
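Editor's note: the "Acquiring lock" / "acquired ... waited" / '"released" ... held' triplets throughout this section are emitted by oslo.concurrency's lockutils, which serializes the resource-tracker and per-instance-event critical sections. A minimal sketch of both forms it supports, with hypothetical bodies:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on the 'compute_resources' name,
    # producing acquired/released pairs like the ones logged above.
    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        pass  # hypothetical critical section (resource accounting, etc.)

    # Context-manager form, e.g. for per-instance event locks such as the
    # "<uuid>-events" names seen in the external-event and terminate paths.
    with lockutils.lock('aebd1200-ae52-4537-a677-24b57b581517-events'):
        pass  # protected work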
[ 660.733819] env[68244]: DEBUG nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 660.734792] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3688a0f-e284-41c5-85b7-b94b280de3e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.830799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.831411] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.837130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.204s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.837130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.837130] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 660.837130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.344s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.839801] env[68244]: INFO nova.compute.claims [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.846743] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e09bbbe-b273-4dbd-ad35-0e3e5ce538be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.859195] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c08b2ce-acf1-429a-80f3-b236a5dca5ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.879682] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810d4db6-ba25-463b-a2f1-0a43378c7174 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.886906] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14d5460-9af6-4244-8525-e174280914fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.925827] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181054MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 660.925827] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.002685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Releasing lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.003990] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Instance network_info: |[{"id": "f1165d65-1146-43b0-8b0b-413cebb150aa", "address": "fa:16:3e:3d:c2:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1165d65-11", "ovs_interfaceid": "f1165d65-1146-43b0-8b0b-413cebb150aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 661.004450] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: 
aebd1200-ae52-4537-a677-24b57b581517] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:c2:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1165d65-1146-43b0-8b0b-413cebb150aa', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 661.019616] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Creating folder: Project (561ba494fc6a4870895269ee5c203dd4). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.020339] env[68244]: DEBUG nova.compute.manager [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 661.020529] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.021689] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f779665b-a639-4579-9693-83d251b76731 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.025474] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.026359] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd87b43-013d-441b-8ce3-11bf77a4d070 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.042504] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.042504] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89c0584e-baa3-4b4c-863c-e946bb5d5f56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.045444] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Created folder: Project (561ba494fc6a4870895269ee5c203dd4) in parent group-v558876. 
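The surrounding entries show the recurring oslo.vmware call pattern: the driver invokes a vSphere method through the API session (Folder.CreateFolder, VirtualMachine.PowerOffVM_Task, Folder.CreateVM_Task) and, for *_Task methods, then blocks in wait_for_task, which emits the "Waiting for the task ... progress is 0% ... completed successfully" lines. A hedged sketch of that invoke-and-poll pattern is below; the endpoint, credentials and vm_ref are placeholders for illustration, not values from this deployment.

```python
# Sketch of the oslo.vmware invoke-and-poll pattern seen in the log.
# Connection details and the vm_ref lookup are placeholders/assumptions.
from oslo_vmware import api


def make_session() -> api.VMwareAPISession:
    # Placeholder endpoint and credentials; a real deployment takes these
    # from the [vmware] section of nova.conf.
    return api.VMwareAPISession(
        host='vcenter.example.test',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)


def power_off_vm(session: api.VMwareAPISession, vm_ref) -> None:
    """Invoke PowerOffVM_Task on a VM reference and wait for completion.

    invoke_api() issues the SOAP call (the "Invoking
    VirtualMachine.PowerOffVM_Task" lines), and wait_for_task() polls the
    returned task until it succeeds or raises, producing the
    "progress is 0% ... completed successfully" entries.
    """
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
```

Non-task calls such as Folder.CreateFolder return their result directly from invoke_api(), which is why the folder-creation entries above have no matching task-poll lines.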
[ 661.045856] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Creating folder: Instances. Parent ref: group-v558895. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.046518] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50d64567-99f3-4e8c-bcf4-c3964743f18f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.050304] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 661.050304] env[68244]: value = "task-2779879" [ 661.050304] env[68244]: _type = "Task" [ 661.050304] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.059274] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Created folder: Instances in parent group-v558895. [ 661.059274] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 661.067021] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 661.067021] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.067021] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bc0d3e8-4335-48b9-854f-923db0d0192d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.094742] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 661.094742] env[68244]: value = "task-2779881" [ 661.094742] env[68244]: _type = "Task" [ 661.094742] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.106619] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779881, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.201070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.202390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.239770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-acd16e0a-e30c-4e18-beb2-d55920690acd tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.805s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.263409] env[68244]: INFO nova.compute.manager [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Took 18.14 seconds to build instance. [ 661.356023] env[68244]: DEBUG nova.compute.utils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 661.356023] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 661.356339] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.528182] env[68244]: DEBUG nova.policy [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '813e863e39a449dd915ef45aa553cdab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '207109eb01bd42b081cc66385789ab80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 661.561459] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779879, 'name': PowerOffVM_Task, 'duration_secs': 0.226542} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.565020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.565020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 661.565020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b146dcd0-0d82-4025-a565-f958320ed388 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.608644] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779881, 'name': CreateVM_Task, 'duration_secs': 0.381138} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.608832] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.609455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.609874] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.610393] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.611148] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-162125b9-2bea-4deb-b2b7-a5b80c1c081b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.617037] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 661.617037] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f2c2-e0c9-7a8f-ca29-bf09fd2b03eb" [ 661.617037] env[68244]: _type = "Task" [ 661.617037] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.625047] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f2c2-e0c9-7a8f-ca29-bf09fd2b03eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.668109] env[68244]: DEBUG nova.compute.manager [None req-8a349dfc-dee8-48d7-9a8c-9936b955afeb tempest-ServerDiagnosticsV248Test-68488678 tempest-ServerDiagnosticsV248Test-68488678-project-admin] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.668109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cbae2d-2ecd-4256-94f7-21d9fd5f95a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.676221] env[68244]: INFO nova.compute.manager [None req-8a349dfc-dee8-48d7-9a8c-9936b955afeb tempest-ServerDiagnosticsV248Test-68488678 tempest-ServerDiagnosticsV248Test-68488678-project-admin] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Retrieving diagnostics [ 661.677038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3953fbc-dbeb-4b88-a6b9-cb842eb5ecf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.715358] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 661.767619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7bee193d-5dd1-409c-908b-2b081c1b2cc5 tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.658s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.860426] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.922366] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 661.922366] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 661.922366] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Deleting the datastore file [datastore2] 47330950-506d-41c7-b564-30f46a7025a7 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.922366] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ed9f031-d9b8-4220-931e-ebb26680c523 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.938220] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for the task: (returnval){ [ 661.938220] env[68244]: value = "task-2779883" [ 661.938220] env[68244]: _type = "Task" [ 661.938220] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.952939] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779883, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.128237] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f2c2-e0c9-7a8f-ca29-bf09fd2b03eb, 'name': SearchDatastore_Task, 'duration_secs': 0.028123} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.129729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.129729] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.129729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.129729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.129987] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.132352] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc2982a0-d7bc-4578-aa08-a347cb746bab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.144368] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.144672] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.145652] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae18220-7216-4df5-928b-8847f9a66c0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.151888] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da3db57-eaad-450a-8771-5e3b2930a5a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.156334] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 662.156334] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52afe32e-6da7-3a4c-9937-98d8f1e60057" [ 662.156334] env[68244]: _type = "Task" [ 662.156334] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.164805] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f81c558-c06c-42ef-a146-c2ec4bbbdd54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.172031] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52afe32e-6da7-3a4c-9937-98d8f1e60057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.203614] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39643ccc-9502-45b7-ab7f-01f25205c6b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.215833] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44fe69a-bb6c-41b2-b678-a1995f3a904d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.239029] env[68244]: DEBUG nova.compute.provider_tree [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.259348] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.276199] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Successfully updated port: 455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.320017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "03af8758-fba3-4173-b998-d9e6b3113f8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.320272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.453910] env[68244]: DEBUG oslo_vmware.api [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Task: {'id': task-2779883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301203} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.453910] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.454399] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 662.454893] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.454893] env[68244]: INFO nova.compute.manager [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Took 1.43 seconds to destroy the instance on the hypervisor. [ 662.455200] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 662.455792] env[68244]: DEBUG nova.compute.manager [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 662.456286] env[68244]: DEBUG nova.network.neutron [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.666609] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52afe32e-6da7-3a4c-9937-98d8f1e60057, 'name': SearchDatastore_Task, 'duration_secs': 0.016147} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.667545] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ac5f14-31bc-4dc8-93fe-7294ec43a9c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.675841] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 662.675841] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285bc09-9a24-b9bc-3342-6fa80c162efc" [ 662.675841] env[68244]: _type = "Task" [ 662.675841] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.684916] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285bc09-9a24-b9bc-3342-6fa80c162efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.693891] env[68244]: DEBUG nova.compute.manager [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Received event network-changed-f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 662.694336] env[68244]: DEBUG nova.compute.manager [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Refreshing instance network info cache due to event network-changed-f1165d65-1146-43b0-8b0b-413cebb150aa. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 662.694700] env[68244]: DEBUG oslo_concurrency.lockutils [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] Acquiring lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.695407] env[68244]: DEBUG oslo_concurrency.lockutils [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] Acquired lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.695407] env[68244]: DEBUG nova.network.neutron [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Refreshing network info cache for port f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.708925] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Successfully created port: 3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.744059] env[68244]: DEBUG nova.scheduler.client.report [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.784776] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.784941] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquired lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.785112] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.823221] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 
tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 662.871835] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.928601] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.928601] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.928601] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.929306] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.929306] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.929306] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.929306] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.929707] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.929707] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.929707] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.930087] env[68244]: DEBUG nova.virt.hardware [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.934284] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56001ef7-bf97-4c34-93a4-432a9ad4c73d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.949953] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd78626-40be-4b78-9e89-cc55f4c83e4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.185886] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285bc09-9a24-b9bc-3342-6fa80c162efc, 'name': SearchDatastore_Task, 'duration_secs': 0.013483} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.186599] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.186599] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aebd1200-ae52-4537-a677-24b57b581517/aebd1200-ae52-4537-a677-24b57b581517.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.186741] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a97980a4-2d2f-4c1e-b2f2-eff39fdf4e1c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.194525] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 663.194525] env[68244]: value = "task-2779884" [ 663.194525] env[68244]: _type = "Task" [ 663.194525] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.207347] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.252787] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.253337] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.257047] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.975s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.258679] env[68244]: INFO nova.compute.claims [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.356306] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.367425] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.712099] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779884, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.777949] env[68244]: DEBUG nova.compute.utils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.780559] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.780927] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.941045] env[68244]: INFO nova.compute.manager [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Rebuilding instance [ 663.966575] env[68244]: DEBUG nova.network.neutron [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Updating instance_info_cache with network_info: [{"id": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "address": "fa:16:3e:3e:db:ab", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap455f4a3c-bc", "ovs_interfaceid": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.019400] env[68244]: DEBUG nova.compute.manager [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.020322] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792af62d-5075-41fe-bf7d-c196d3e6fc38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.034675] env[68244]: DEBUG nova.policy [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0c13e099528435296ac3827d8f52e31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2339433c10b4813937eb9968a84324a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize 
/opt/stack/nova/nova/policy.py:192}} [ 664.184195] env[68244]: DEBUG nova.network.neutron [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.213945] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881241} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.216140] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aebd1200-ae52-4537-a677-24b57b581517/aebd1200-ae52-4537-a677-24b57b581517.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.216140] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.216140] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc744e45-a8ad-42e8-9ecf-c5638c0f06e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.223618] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 664.223618] env[68244]: value = "task-2779885" [ 664.223618] env[68244]: _type = "Task" [ 664.223618] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.234774] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779885, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.288882] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.369798] env[68244]: DEBUG nova.network.neutron [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Updated VIF entry in instance network info cache for port f1165d65-1146-43b0-8b0b-413cebb150aa. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.370433] env[68244]: DEBUG nova.network.neutron [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Updating instance_info_cache with network_info: [{"id": "f1165d65-1146-43b0-8b0b-413cebb150aa", "address": "fa:16:3e:3d:c2:fa", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1165d65-11", "ovs_interfaceid": "f1165d65-1146-43b0-8b0b-413cebb150aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.476184] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Releasing lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.476184] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Instance network_info: |[{"id": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "address": "fa:16:3e:3e:db:ab", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap455f4a3c-bc", "ovs_interfaceid": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.476329] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f 
tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:db:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '455f4a3c-bc0d-49b8-9c1f-685ca84e33a4', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.484317] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Creating folder: Project (db652bbe2171493cbdd04d3139682698). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.487934] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ee40cdb-49c7-4b4a-91c4-4781343602c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.502366] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Created folder: Project (db652bbe2171493cbdd04d3139682698) in parent group-v558876. [ 664.502562] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Creating folder: Instances. Parent ref: group-v558898. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.502802] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2276754e-564b-4b2c-8d2c-0ddc66dda390 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.512533] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Created folder: Instances in parent group-v558898. [ 664.512777] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.515989] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.517241] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0e5cab7-faef-4823-a237-4eb519066877 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.553249] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.553249] env[68244]: value = "task-2779888" [ 664.553249] env[68244]: _type = "Task" [ 664.553249] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.560687] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779888, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.651471] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e85e9a4-c973-4607-99e2-937691ba9416 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.659076] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46725295-b7bc-4e22-88ae-e1ad3db60307 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.692757] env[68244]: INFO nova.compute.manager [-] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Took 2.24 seconds to deallocate network for instance. [ 664.695290] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48f33b0-e59c-4db8-b7c9-55a1101fa9af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.707317] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae10b52-a329-48c6-ad89-1e8b2431455f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.722135] env[68244]: DEBUG nova.compute.provider_tree [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.732917] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171477} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.734606] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.737404] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676e0278-fc25-43bd-b4fa-460de5a1a492 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.766866] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] aebd1200-ae52-4537-a677-24b57b581517/aebd1200-ae52-4537-a677-24b57b581517.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.768203] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fa017df-e1bd-4a91-a14a-57217fd1d3ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.791140] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 664.791140] env[68244]: value = "task-2779889" [ 664.791140] env[68244]: _type = "Task" [ 664.791140] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.803435] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779889, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.878047] env[68244]: DEBUG oslo_concurrency.lockutils [req-29dd0bf3-5650-499e-9511-c0273ef0c5ff req-1fbe1609-869b-4f37-bc2c-0903de9ad170 service nova] Releasing lock "refresh_cache-aebd1200-ae52-4537-a677-24b57b581517" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.976487] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.976824] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.977337] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.977337] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.977337] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.983069] env[68244]: INFO nova.compute.manager [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Terminating instance [ 665.022018] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Successfully updated port: 3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.052098] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 
tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.052756] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15a18039-a698-4318-abd3-9cfaf32a572a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.066407] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779888, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.066407] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 665.066407] env[68244]: value = "task-2779890" [ 665.066407] env[68244]: _type = "Task" [ 665.066407] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.074557] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.154487] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Successfully created port: 86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.203834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.226557] env[68244]: DEBUG nova.scheduler.client.report [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.303780] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779889, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.307446] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.332810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Acquiring lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.333556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.333556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Acquiring lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.333721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.333764] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.336561] env[68244]: INFO nova.compute.manager [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Terminating instance [ 665.352035] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.352035] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.352035] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.352322] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.352322] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.352322] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.352322] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.352322] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.352503] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.352503] env[68244]: DEBUG 
nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.352503] env[68244]: DEBUG nova.virt.hardware [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.353506] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48cac98-1711-4b94-8af0-90aff9f02b85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.364040] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dde223f-ebff-40a5-8fb5-c3377f91b2df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.492573] env[68244]: DEBUG nova.compute.manager [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 665.492573] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.493768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608bdd23-ac47-45a9-b0b7-b3b9a31bb1eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.501646] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.501906] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aed8e2ba-04b4-43eb-8b84-599ade3c4ddc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.511799] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 665.511799] env[68244]: value = "task-2779891" [ 665.511799] env[68244]: _type = "Task" [ 665.511799] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.534722] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.534722] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.534722] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.534722] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.567312] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779888, 'name': CreateVM_Task, 'duration_secs': 0.759776} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.569912] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.570968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.571147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.571541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.572150] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38b95e33-23f9-4a2b-a73f-7b0328bace11 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.582844] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779890, 'name': PowerOffVM_Task, 'duration_secs': 0.242653} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.583668] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.588024] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.588024] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46701416-bd5b-453e-84a4-823e6c2700fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.592503] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 665.592503] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284242f-a116-28f2-ebe5-1474453d2c22" [ 665.592503] env[68244]: _type = "Task" [ 665.592503] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.602417] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 665.603070] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ab63c3b-e2b6-4951-aa09-f0c782757514 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.609187] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284242f-a116-28f2-ebe5-1474453d2c22, 'name': SearchDatastore_Task, 'duration_secs': 0.020471} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.609471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.609700] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.609981] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.610064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.610237] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.610491] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88c9d267-020d-4e48-ba48-422f64488d3d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.619586] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.619586] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.620244] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-921f5bf7-65a6-42aa-a648-4ae531ce22cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.633537] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 665.633781] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 665.633968] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Deleting the datastore file [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 665.634967] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 665.634967] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a9256d-811a-dbc6-bb3a-5d5eb1abe4fe" [ 665.634967] env[68244]: _type = "Task" [ 665.634967] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.635236] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-469e1c7f-5262-43e2-80b3-2b343651ae1a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.648796] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a9256d-811a-dbc6-bb3a-5d5eb1abe4fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.650332] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 665.650332] env[68244]: value = "task-2779893" [ 665.650332] env[68244]: _type = "Task" [ 665.650332] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.659454] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779893, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.731826] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.732213] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.740344] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.310s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.741443] env[68244]: INFO nova.compute.claims [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.804236] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779889, 'name': ReconfigVM_Task, 'duration_secs': 0.743556} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.804500] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Reconfigured VM instance instance-00000007 to attach disk [datastore2] aebd1200-ae52-4537-a677-24b57b581517/aebd1200-ae52-4537-a677-24b57b581517.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.805769] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d731da60-f209-404e-ac6c-44e6a3b36492 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.814051] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 665.814051] env[68244]: value = "task-2779894" [ 665.814051] env[68244]: _type = "Task" [ 665.814051] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.831293] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779894, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.846750] env[68244]: DEBUG nova.compute.manager [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 665.847503] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.847904] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1ac728-f2db-423a-bcad-6e5332e26cfd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.861280] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.861555] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a29092e0-cce3-4e3e-9ac8-db3dce8faa11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.868996] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Waiting for the task: (returnval){ [ 665.868996] env[68244]: value = "task-2779895" [ 665.868996] env[68244]: _type = "Task" [ 665.868996] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.879294] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Task: {'id': task-2779895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.026034] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779891, 'name': PowerOffVM_Task, 'duration_secs': 0.272912} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.026375] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.026547] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.026806] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a4ca69c-42d6-4455-bbeb-9629b19a34ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.094330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.094330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.094330] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleting the datastore file [datastore2] f48156b9-0316-4a9c-9cf0-9dd9d7a932c1 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.094330] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee84c674-6193-4db9-8b24-c4445a7d867a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.104522] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for the task: (returnval){ [ 666.104522] env[68244]: value = "task-2779897" [ 666.104522] env[68244]: _type = "Task" [ 666.104522] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.109127] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.123444] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779897, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.154529] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a9256d-811a-dbc6-bb3a-5d5eb1abe4fe, 'name': SearchDatastore_Task, 'duration_secs': 0.024941} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.159014] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff2af83-3d0c-4308-bbf4-ad02eb888c11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.167661] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 666.167661] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c8eb7-047e-a993-9322-1a36e600b79a" [ 666.167661] env[68244]: _type = "Task" [ 666.167661] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.168256] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175066} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.168579] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.168738] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.168922] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.179717] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c8eb7-047e-a993-9322-1a36e600b79a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.248741] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.249384] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.251681] env[68244]: DEBUG nova.compute.utils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 666.259035] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 666.259216] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 666.268656] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.269049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.327330] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779894, 'name': Rename_Task, 'duration_secs': 0.159881} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.327601] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.327847] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45599813-0f40-43c8-9cda-82aa7d8d6721 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.339017] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 666.339017] env[68244]: value = "task-2779898" [ 666.339017] env[68244]: _type = "Task" [ 666.339017] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.348570] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.350196] env[68244]: DEBUG nova.policy [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04de2afc5cd4b228c125e73072015d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6903b79c37d418aa5a767d9cb537ef4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.383586] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Task: {'id': task-2779895, 'name': PowerOffVM_Task, 'duration_secs': 0.171731} completed successfully. 
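The "Invoking VirtualMachine.PowerOnVM_Task" / "Waiting for the task" / "progress is N%" sequence above is oslo.vmware's submit-then-poll flow. A hedged sketch of that flow, assuming an already-constructed oslo.vmware VMwareAPISession in `session` and treating `vm_ref` as a placeholder managed-object reference:

```python
# Hedged sketch of the submit-then-poll flow visible above (wait_for_task
# at oslo_vmware/api.py:397, polling at :434, completion at :444).
# `session` is assumed to be an already-built oslo.vmware VMwareAPISession;
# `vm_ref` is a placeholder VirtualMachine managed-object reference.
def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call and returns a Task reference.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() blocks, polling the task until it succeeds (the
    # "progress is N%" lines) or raises if vCenter reports an error.
    return session.wait_for_task(task_ref)
```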
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.383586] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.383586] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.383586] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f1c7016-04e1-45cd-86e4-c32a04476e2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.457686] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.457686] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.457686] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Deleting the datastore file [datastore2] 3a4e045e-8e27-45e4-9c90-8aa16298a096 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.457686] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-456bd0e0-d5be-421e-94ed-0086abc72420 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.463600] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Waiting for the task: (returnval){ [ 666.463600] env[68244]: value = "task-2779900" [ 666.463600] env[68244]: _type = "Task" [ 666.463600] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.472525] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Task: {'id': task-2779900, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.562100] env[68244]: DEBUG nova.network.neutron [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.615215] env[68244]: DEBUG oslo_vmware.api [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Task: {'id': task-2779897, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263505} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.615499] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.615680] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.615855] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.616044] env[68244]: INFO nova.compute.manager [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Took 1.12 seconds to destroy the instance on the hypervisor. 
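The instance_info_cache update above stores a list of VIF dicts per instance. As a small illustration (plain dicts mirroring the logged entry, not Nova's NetworkInfo/VIF model classes), the port id, MAC and fixed IPs can be pulled out like this:

```python
# Illustration only: summarizing one cached VIF entry shaped like the
# network_info logged above. Plain dicts, not Nova's network model classes.
def summarize_vif(vif):
    ips = [ip['address']
           for subnet in vif['network']['subnets']
           for ip in subnet['ips']]
    return {'port_id': vif['id'], 'mac': vif['address'], 'fixed_ips': ips}

vif = {
    'id': '3a4a896b-0463-43a3-8487-d50328142090',
    'address': 'fa:16:3e:2d:95:05',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.3'}]}]},
}
print(summarize_vif(vif))
# -> {'port_id': '3a4a896b-...', 'mac': 'fa:16:3e:2d:95:05',
#     'fixed_ips': ['192.168.128.3']}
```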
[ 666.616323] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.616856] env[68244]: DEBUG nova.compute.manager [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 666.616950] env[68244]: DEBUG nova.network.neutron [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.685270] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c8eb7-047e-a993-9322-1a36e600b79a, 'name': SearchDatastore_Task, 'duration_secs': 0.019788} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.685612] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.685786] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 23f2ad6c-ea98-4a32-a79a-75cec6fc925e/23f2ad6c-ea98-4a32-a79a-75cec6fc925e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.686051] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f1a0d34-a593-4dc7-9fce-8d271888916e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.693658] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 666.693658] env[68244]: value = "task-2779901" [ 666.693658] env[68244]: _type = "Task" [ 666.693658] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.707946] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779901, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.751252] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 666.759068] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.837550] env[68244]: DEBUG nova.compute.manager [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Received event network-vif-plugged-3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 666.837999] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.838225] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.838487] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.838487] env[68244]: DEBUG nova.compute.manager [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] No waiting events found dispatching network-vif-plugged-3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 666.838928] env[68244]: WARNING nova.compute.manager [req-a7ae97bf-d542-4549-89c2-ad554b39f919 req-608da878-18d0-45b7-a013-bca2fa13b541 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Received unexpected event network-vif-plugged-3a4a896b-0463-43a3-8487-d50328142090 for instance with vm_state building and task_state spawning. [ 666.857055] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779898, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.953410] env[68244]: DEBUG nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Received event network-vif-plugged-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 666.957518] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Acquiring lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.957518] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.957518] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.957518] env[68244]: DEBUG nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] No waiting events found dispatching network-vif-plugged-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 666.957518] env[68244]: WARNING nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Received unexpected event network-vif-plugged-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 for instance with vm_state building and task_state spawning. [ 666.957861] env[68244]: DEBUG nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Received event network-changed-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 666.957861] env[68244]: DEBUG nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Refreshing instance network info cache due to event network-changed-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 666.957861] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Acquiring lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.957861] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Acquired lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.957861] env[68244]: DEBUG nova.network.neutron [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Refreshing network info cache for port 455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 666.977283] env[68244]: DEBUG oslo_vmware.api [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Task: {'id': task-2779900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360386} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.977713] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.978020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.978339] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.979041] env[68244]: INFO nova.compute.manager [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Took 1.13 seconds to destroy the instance on the hypervisor. [ 666.979041] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
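The "Waiting for function ..._deallocate_network_with_retries to return" lines come from oslo.service's looping-call retry machinery. A hedged sketch of that kind of wrapper, assuming oslo.service's RetryDecorator is what emits the message; the retry counts, exception type and helper are illustrative, not Nova's actual configuration:

```python
# Hedged sketch: a retry wrapper in the style of oslo.service's
# RetryDecorator, which appears to produce the "Waiting for function ...
# to return" lines above. Retry counts, the exception type and
# release_ports() are illustrative placeholders.
from oslo_service import loopingcall

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10, exceptions=(IOError,))
def deallocate_network_with_retries():
    # The decorator re-invokes this on the listed exceptions, sleeping
    # with an increasing interval, until it succeeds or the retry budget
    # runs out.
    release_ports()

def release_ports():
    print('releasing neutron ports')

deallocate_network_with_retries()
```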
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.979635] env[68244]: DEBUG nova.compute.manager [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 666.979849] env[68244]: DEBUG nova.network.neutron [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.053874] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Successfully created port: e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.067506] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.067785] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Instance network_info: |[{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.068582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:95:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a4a896b-0463-43a3-8487-d50328142090', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.088410] env[68244]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating folder: Project (207109eb01bd42b081cc66385789ab80). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.091528] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00f17b25-e01b-436a-8cd0-7a114336ea0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.105233] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Created folder: Project (207109eb01bd42b081cc66385789ab80) in parent group-v558876. [ 667.105441] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating folder: Instances. Parent ref: group-v558901. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.105707] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0c9cbad-6c1d-4adf-94f3-0e2cffd5cf00 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.120365] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Created folder: Instances in parent group-v558901. [ 667.120784] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.120927] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.121119] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27e46688-498c-4cfd-9c4a-ed9213206bec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.157379] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.157379] env[68244]: value = "task-2779904" [ 667.157379] env[68244]: _type = "Task" [ 667.157379] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.172522] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779904, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.210690] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779901, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.218305] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.218627] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.225474] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.225722] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.226643] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.226643] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.226643] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.226643] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.226643] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.227171] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.227171] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.227171] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.227571] env[68244]: DEBUG nova.virt.hardware [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.228889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3531f8ee-8d3d-4d80-afb5-b15a7cbcec5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.241498] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19145096-521a-426e-ad28-ba99c8e655ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.274136] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.280046] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
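The nova.virt.hardware lines above walk from flavor and image limits (defaulting to sockets=65536, cores=65536, threads=65536) to a single possible topology for the 1-vCPU m1.nano flavor. A small illustrative sketch of that enumeration, not Nova's implementation:

```python
# Illustrative sketch of the topology enumeration described above: list the
# (sockets, cores, threads) triples whose product equals the vCPU count and
# which fit under the limits. Mirrors the logged result for 1 vCPU.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log above
print(possible_topologies(4))   # several triples, e.g. (2, 2, 1), (4, 1, 1)
```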
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.284094] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.286325] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a65782-a56b-4bef-aef9-a836eb3faa78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.289387] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-152e747a-78a5-4eed-af9e-72e708b6aedc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.307592] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9341f5f-9670-4095-bb05-590e7769b81f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.310829] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.310829] env[68244]: value = "task-2779905" [ 667.310829] env[68244]: _type = "Task" [ 667.310829] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.311858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.342463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d5c24-04c3-4970-8080-ce7ffcad9b82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.349205] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779905, 'name': CreateVM_Task} progress is 15%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.360321] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb745e4-d0c3-4aba-b1cc-60b690411015 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.364927] env[68244]: DEBUG oslo_vmware.api [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779898, 'name': PowerOnVM_Task, 'duration_secs': 0.790607} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.365200] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.365389] env[68244]: INFO nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Took 10.44 seconds to spawn the instance on the hypervisor. [ 667.365560] env[68244]: DEBUG nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.367076] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b74a6b-ca56-406c-9666-d35391143a57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.380523] env[68244]: DEBUG nova.compute.provider_tree [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.664532] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779904, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.705567] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779901, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629253} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.705881] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 23f2ad6c-ea98-4a32-a79a-75cec6fc925e/23f2ad6c-ea98-4a32-a79a-75cec6fc925e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.706183] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.706469] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d932b412-c912-4410-aa18-434ee6daed27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.714902] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 667.714902] env[68244]: value = "task-2779906" [ 667.714902] env[68244]: _type = "Task" [ 667.714902] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.725223] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779906, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.785620] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.821474] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.821717] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.822047] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.822047] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.822232] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.822433] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.822546] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.822741] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.822913] env[68244]: DEBUG nova.virt.hardware [None 
req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.823111] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.823317] env[68244]: DEBUG nova.virt.hardware [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.831026] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29ec016-b5c4-475f-b1b4-5f135507da7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.831026] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779905, 'name': CreateVM_Task, 'duration_secs': 0.500254} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.831026] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 667.831244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.831244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.831554] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 667.831795] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fbdf6f3-5986-4b06-805e-8b34410929a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.837151] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c82b73-e101-45b1-a928-ba960ca27b04 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.842502] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae 
tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 667.842502] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e870c-1f24-aaf7-efb5-294aba756299" [ 667.842502] env[68244]: _type = "Task" [ 667.842502] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.860173] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e870c-1f24-aaf7-efb5-294aba756299, 'name': SearchDatastore_Task, 'duration_secs': 0.014979} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.860487] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.860732] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.860979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.861265] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.861343] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 667.861621] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f249e6a-8b98-45dd-b94d-cd04ee6f22f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.871333] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 667.871510] env[68244]: DEBUG nova.virt.vmwareapi.vmops 
[None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 667.872281] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83f1749d-7df6-4d9f-b63e-5e22e5f98bde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.878476] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 667.878476] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dc0f48-321c-0ec1-2a93-80d416367c3a" [ 667.878476] env[68244]: _type = "Task" [ 667.878476] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.887209] env[68244]: DEBUG nova.scheduler.client.report [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.896020] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dc0f48-321c-0ec1-2a93-80d416367c3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.900514] env[68244]: INFO nova.compute.manager [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Took 24.45 seconds to build instance. 
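The inventory reported above translates into schedulable capacity via the usual placement formula, capacity = (total - reserved) * allocation_ratio. A quick check with the numbers from the log:

```python
# Worked example: capacity implied by the inventory logged above for
# provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, using
# (total - reserved) * allocation_ratio. Numbers copied from the log line.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```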
[ 668.049485] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Successfully updated port: 86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.099277] env[68244]: DEBUG nova.network.neutron [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.119260] env[68244]: DEBUG nova.network.neutron [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.161652] env[68244]: DEBUG nova.network.neutron [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Updated VIF entry in instance network info cache for port 455f4a3c-bc0d-49b8-9c1f-685ca84e33a4. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.165020] env[68244]: DEBUG nova.network.neutron [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Updating instance_info_cache with network_info: [{"id": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "address": "fa:16:3e:3e:db:ab", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap455f4a3c-bc", "ovs_interfaceid": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.173325] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779904, 'name': CreateVM_Task, 'duration_secs': 0.524544} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.173325] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.173325] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.173533] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.173947] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.174182] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d27a66f8-06b5-47df-b982-b45d56a37cfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.179512] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 668.179512] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182f69-0f90-f657-3bbc-63831757b29a" [ 668.179512] env[68244]: _type = "Task" [ 668.179512] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.190103] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182f69-0f90-f657-3bbc-63831757b29a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.227429] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103354} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.227705] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.228544] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04044103-6570-42cc-b1bd-1f220e71a4f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.254634] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 23f2ad6c-ea98-4a32-a79a-75cec6fc925e/23f2ad6c-ea98-4a32-a79a-75cec6fc925e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.255172] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78598926-3a7c-4190-b265-5b0794682f3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.276840] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 668.276840] env[68244]: value = "task-2779907" [ 668.276840] env[68244]: _type = "Task" [ 668.276840] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.287357] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779907, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.389361] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dc0f48-321c-0ec1-2a93-80d416367c3a, 'name': SearchDatastore_Task, 'duration_secs': 0.026973} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.389361] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3417f6d6-2dca-4806-b710-fffa357cff87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.395016] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 668.395016] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52631020-911a-3a7f-df2f-c35b8ea005aa" [ 668.395016] env[68244]: _type = "Task" [ 668.395016] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.398356] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.398804] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 668.401922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.476s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.403902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc285824-3cba-4924-b0a0-21dc447edf89 tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.963s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.408302] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52631020-911a-3a7f-df2f-c35b8ea005aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.556239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.556322] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.556513] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.604720] env[68244]: INFO nova.compute.manager [-] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Took 1.99 seconds to deallocate network for instance. 
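The lockutils lines above ("acquired ... waited 7.476s", "released ... held 2.659s") come from oslo.concurrency's internal fair locks serializing the resource tracker and the per-instance refresh_cache-<uuid> network cache refreshes. A short sketch of the same pattern using oslo.concurrency's public synchronized() decorator and lock() context manager; the function bodies are placeholders, not nova code.

    import time
    from oslo_concurrency import lockutils

    # Serialize resource-tracker style updates behind the "compute_resources"
    # lock, the way ResourceTracker.instance_claim / update_usage do in the log.
    @lockutils.synchronized("compute_resources")
    def update_usage(instance_uuid):
        time.sleep(0.1)  # placeholder for the real accounting work

    # Context-manager form, matching the refresh_cache-<uuid> locks in the log,
    # reporting waited/held times the same way lockutils does.
    def refresh_network_cache(instance_uuid):
        started = time.monotonic()
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            acquired = time.monotonic()
            # ... rebuild the instance network info cache here ...
        released = time.monotonic()
        print(f"waited {acquired - started:.3f}s, held {released - acquired:.3f}s")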
[ 668.623037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.623037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.623037] env[68244]: INFO nova.compute.manager [-] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Took 1.64 seconds to deallocate network for instance. [ 668.670206] env[68244]: DEBUG oslo_concurrency.lockutils [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] Releasing lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.670206] env[68244]: DEBUG nova.compute.manager [req-92a82076-f93f-4643-b9e4-731e328b5101 req-37d2be0b-5afb-42e7-bc1f-85c70a53547a service nova] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Received event network-vif-deleted-8289ebb7-2b25-4ad3-bda3-0609581f96be {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 668.696416] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182f69-0f90-f657-3bbc-63831757b29a, 'name': SearchDatastore_Task, 'duration_secs': 0.056808} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.696416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.696416] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.696416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.786781] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.905593] env[68244]: DEBUG nova.compute.utils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.924229] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.924229] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.926709] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.929924] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52631020-911a-3a7f-df2f-c35b8ea005aa, 'name': SearchDatastore_Task, 'duration_secs': 0.024604} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.930931] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.931193] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 668.931824] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.932044] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 668.932855] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e341b16-4556-4d9b-bcea-645361ad1676 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.936964] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3a5db81-d1b5-49b8-8791-9f888f0cfaa6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.945182] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 668.945182] env[68244]: value = "task-2779908" [ 668.945182] env[68244]: _type = "Task" [ 668.945182] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.946774] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.947588] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.951658] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-016ac690-7600-4b2e-a545-78fec6858df7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.961168] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.962128] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 668.962128] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267be82-3cf7-e5f5-dc4c-69dd0d9ebe91" [ 668.962128] env[68244]: _type = "Task" [ 668.962128] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.975584] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267be82-3cf7-e5f5-dc4c-69dd0d9ebe91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.019344] env[68244]: DEBUG nova.policy [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e214afd52b24b3ead7805b158efe1fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2274129c89f94fdfbab47ca11d05db0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 669.119062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.128866] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.274332] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Instance cache missing 
network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.288226] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779907, 'name': ReconfigVM_Task, 'duration_secs': 0.63035} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.288470] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 23f2ad6c-ea98-4a32-a79a-75cec6fc925e/23f2ad6c-ea98-4a32-a79a-75cec6fc925e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.289225] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d3653ce-6fe0-4763-9722-2cd39688105a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.297518] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 669.297518] env[68244]: value = "task-2779909" [ 669.297518] env[68244]: _type = "Task" [ 669.297518] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.308705] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779909, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.425124] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 669.458267] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779908, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.475803] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267be82-3cf7-e5f5-dc4c-69dd0d9ebe91, 'name': SearchDatastore_Task, 'duration_secs': 0.019518} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.476755] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681c2626-68ca-404f-8609-57aabb165c55 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.480594] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f48156b9-0316-4a9c-9cf0-9dd9d7a932c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.480764] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 47330950-506d-41c7-b564-30f46a7025a7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 669.480897] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 3a4e045e-8e27-45e4-9c90-8aa16298a096 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481026] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 57504eac-0d7f-4fbe-b08c-6864713cca94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481143] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481308] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance c662b964-abc9-41af-85fd-ea1a540e1e23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481452] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance aebd1200-ae52-4537-a677-24b57b581517 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481576] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 23f2ad6c-ea98-4a32-a79a-75cec6fc925e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481681] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e8655168-1fe8-4590-90a3-2ad9438d7761 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481787] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.481894] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d81bdefa-9c23-413b-9670-bbb2139084f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.482025] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 669.488212] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.493500] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 669.493500] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa4b09-cbef-0d68-29dd-0f7785e6ee62" [ 669.493500] env[68244]: _type = "Task" [ 669.493500] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.502422] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa4b09-cbef-0d68-29dd-0f7785e6ee62, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.581459] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Successfully created port: a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.773397] env[68244]: DEBUG nova.network.neutron [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updating instance_info_cache with network_info: [{"id": "86448281-b3d4-4132-8a5e-1a366a1132e0", "address": "fa:16:3e:06:ab:d0", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86448281-b3", "ovs_interfaceid": "86448281-b3d4-4132-8a5e-1a366a1132e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.810049] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779909, 'name': Rename_Task, 'duration_secs': 0.185076} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.810368] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.810722] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-820dc983-3945-4a03-a272-e6544c8b90c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.819797] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 669.819797] env[68244]: value = "task-2779910" [ 669.819797] env[68244]: _type = "Task" [ 669.819797] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.831867] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.963848] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774696} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.964214] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 669.964438] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 669.964751] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fefffb4-a15d-4c1a-9ae5-b12584354c2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.975607] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 669.975607] env[68244]: value = "task-2779911" [ 669.975607] env[68244]: _type = "Task" [ 669.975607] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.986868] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.991165] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 3776b39a-d10b-4068-8b4b-5dc25798e088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.009868] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa4b09-cbef-0d68-29dd-0f7785e6ee62, 'name': SearchDatastore_Task, 'duration_secs': 0.060602} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.009868] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.009868] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.009868] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-326b028c-906b-432c-822d-4e30337b6440 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.019187] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 670.019187] env[68244]: value = "task-2779912" [ 670.019187] env[68244]: _type = "Task" [ 670.019187] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.040282] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779912, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.073039] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Successfully updated port: e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.128802] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.129213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.171683] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "cb607c5e-797d-4e52-9ba4-66113718dacc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.171793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.279891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.280250] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Instance network_info: |[{"id": "86448281-b3d4-4132-8a5e-1a366a1132e0", "address": "fa:16:3e:06:ab:d0", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86448281-b3", "ovs_interfaceid": "86448281-b3d4-4132-8a5e-1a366a1132e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 670.280696] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:ab:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86448281-b3d4-4132-8a5e-1a366a1132e0', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.288761] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Creating folder: Project (b2339433c10b4813937eb9968a84324a). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.290345] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df59dc91-5c76-4bc2-ae5a-8b07de0941d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.293762] env[68244]: DEBUG nova.compute.manager [None req-78179ff1-8dd7-40b1-94d1-933756ae1e15 tempest-ServerDiagnosticsTest-1900143629 tempest-ServerDiagnosticsTest-1900143629-project-admin] [instance: aebd1200-ae52-4537-a677-24b57b581517] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.295369] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de56a96-db06-4033-b1c1-9a0fae51c26f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.304086] env[68244]: INFO nova.compute.manager [None req-78179ff1-8dd7-40b1-94d1-933756ae1e15 tempest-ServerDiagnosticsTest-1900143629 tempest-ServerDiagnosticsTest-1900143629-project-admin] [instance: aebd1200-ae52-4537-a677-24b57b581517] Retrieving diagnostics [ 670.307021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a483ac15-6e2c-4b04-96cd-e830b338942f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.310067] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Created folder: Project (b2339433c10b4813937eb9968a84324a) in parent group-v558876. 
[ 670.310300] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Creating folder: Instances. Parent ref: group-v558905. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.310576] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65a10c2b-b993-4591-895a-eb24d6a19d1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.351474] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Created folder: Instances in parent group-v558905. [ 670.351947] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.352740] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.353697] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69eaee60-ef83-4c9b-80a3-97a255ec5d4d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.391863] env[68244]: DEBUG oslo_vmware.api [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779910, 'name': PowerOnVM_Task, 'duration_secs': 0.481725} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.392628] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.392829] env[68244]: INFO nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Took 9.91 seconds to spawn the instance on the hypervisor. 
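Tasks 2779908 through 2779910 above trace the spawn path: copy the cached base VMDK into the instance directory, extend the root disk, reconfigure the VM to attach it, rename, then power on, waiting on each vCenter task before issuing the next. A condensed sketch of that ordering follows; run_task is a hypothetical submit-and-wait helper, not the nova or oslo.vmware API.

    def spawn_from_cached_image(run_task, instance, image, datastore="datastore2"):
        """Replay the task sequence visible in the log for one instance.

        run_task(name, **kwargs) is a placeholder that submits the named
        vCenter task and blocks until it completes.
        """
        cache = f"[{datastore}] devstack-image-cache_base/{image}/{image}.vmdk"
        root = f"[{datastore}] {instance}/{instance}.vmdk"

        run_task("CopyVirtualDisk_Task", source=cache, dest=root)       # seed root disk from cache
        run_task("ExtendVirtualDisk_Task", disk=root, size_kb=1048576)  # grow to flavor root_gb
        run_task("ReconfigVM_Task", vm=instance, attach_disk=root)      # attach as sparse disk
        run_task("Rename_Task", vm=instance)                            # final instance name
        run_task("PowerOnVM_Task", vm=instance)                         # boot the guest

    # Usage with a trivial runner that just records the order of operations.
    calls = []
    spawn_from_cached_image(lambda name, **kw: calls.append(name),
                            instance="c662b964-abc9-41af-85fd-ea1a540e1e23",
                            image="9aa0b4d1-af1b-4141-9ca6-95525b722d7e")
    print(calls)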
[ 670.393063] env[68244]: DEBUG nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.394156] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4738f5-381c-47cd-93ae-81d16e3145b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.398747] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.398747] env[68244]: value = "task-2779915" [ 670.398747] env[68244]: _type = "Task" [ 670.398747] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.414727] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779915, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.442780] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 670.487322] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100214} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.488471] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 670.491509] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44d6988-da31-4d85-b040-4e45abf06f18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.495732] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.521675] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 670.530825] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d58953f8-9bdc-48ae-abf7-b79a8b81deab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.550009] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 670.552886] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.552886] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 670.552886] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.552886] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 670.552886] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 670.553238] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 670.553238] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 670.553238] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 670.553238] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 670.553238] env[68244]: DEBUG nova.virt.hardware [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 670.553829] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f75001-159b-4800-afee-2be14ed52537 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.561622] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 670.561622] env[68244]: value = "task-2779916" [ 670.561622] env[68244]: _type = "Task" [ 670.561622] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.562157] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779912, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.570208] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604317cd-abef-4878-b7a9-59a3576d2667 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.578511] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.578511] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquired lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.578511] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.579707] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.833041] env[68244]: DEBUG nova.compute.manager [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Received event network-changed-3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 670.833183] env[68244]: DEBUG nova.compute.manager [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Refreshing instance network info cache due to event network-changed-3a4a896b-0463-43a3-8487-d50328142090. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 670.833421] env[68244]: DEBUG oslo_concurrency.lockutils [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.833613] env[68244]: DEBUG oslo_concurrency.lockutils [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.833820] env[68244]: DEBUG nova.network.neutron [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Refreshing network info cache for port 3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.914088] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779915, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.916594] env[68244]: INFO nova.compute.manager [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Took 26.83 seconds to build instance. [ 670.998044] env[68244]: DEBUG nova.compute.manager [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Received event network-vif-plugged-86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 670.998669] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.998893] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.999160] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.999404] env[68244]: DEBUG nova.compute.manager [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] No waiting events found dispatching network-vif-plugged-86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
670.999608] env[68244]: WARNING nova.compute.manager [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Received unexpected event network-vif-plugged-86448281-b3d4-4132-8a5e-1a366a1132e0 for instance with vm_state building and task_state spawning. [ 671.000015] env[68244]: DEBUG nova.compute.manager [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Received event network-changed-86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 671.000015] env[68244]: DEBUG nova.compute.manager [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Refreshing instance network info cache due to event network-changed-86448281-b3d4-4132-8a5e-1a366a1132e0. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 671.000311] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Acquiring lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.000311] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Acquired lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.000311] env[68244]: DEBUG nova.network.neutron [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Refreshing network info cache for port 86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.002143] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 03af8758-fba3-4173-b998-d9e6b3113f8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.036322] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739274} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.036569] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.036774] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.037042] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64122017-24c8-45fe-87cb-09f733029ac8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.044296] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 671.044296] env[68244]: value = "task-2779917" [ 671.044296] env[68244]: _type = "Task" [ 671.044296] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.053548] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.073569] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.139464] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.395632] env[68244]: DEBUG nova.network.neutron [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updating instance_info_cache with network_info: [{"id": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "address": "fa:16:3e:ef:10:b5", "network": {"id": "17c063d9-ba44-409b-b637-59552bc5d906", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2127301711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6903b79c37d418aa5a767d9cb537ef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape39c0e00-ee", "ovs_interfaceid": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.410013] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779915, 'name': CreateVM_Task, 'duration_secs': 0.938716} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.410783] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 671.411478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.411690] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.412076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 671.412458] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4c5a38c-05e1-429f-a120-3fbd4bc5f135 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.417082] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 671.417082] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ddac7c-6b39-50b7-7ee1-d4ac6e17cfa5" [ 671.417082] env[68244]: _type = "Task" [ 671.417082] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.421440] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c684ac2-a449-410b-bf27-87ffedb3cd7f tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.347s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.428383] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ddac7c-6b39-50b7-7ee1-d4ac6e17cfa5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.509324] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 10957648-8618-4f2c-8b08-5468bca20cfc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.521665] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Successfully updated port: a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.574581] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072232} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.575337] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.576290] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b1c74-cdc3-4293-8db5-67b82b75ee95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.582492] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.605062] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.605189] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-717bfeb0-731c-49c6-966d-83ef9d5e5559 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.626463] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 671.626463] env[68244]: value = "task-2779918" [ 671.626463] env[68244]: _type = "Task" [ 671.626463] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.635160] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.751516] env[68244]: DEBUG nova.network.neutron [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updated VIF entry in instance network info cache for port 3a4a896b-0463-43a3-8487-d50328142090. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.751891] env[68244]: DEBUG nova.network.neutron [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.898530] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Releasing lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.898864] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Instance network_info: |[{"id": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "address": "fa:16:3e:ef:10:b5", "network": {"id": "17c063d9-ba44-409b-b637-59552bc5d906", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2127301711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6903b79c37d418aa5a767d9cb537ef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape39c0e00-ee", "ovs_interfaceid": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 671.899293] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:10:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0685bd0b-3dbf-4a06-951c-c6a4726dd4b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e39c0e00-ee59-4d80-b276-18ca3d5cb12f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.908089] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Creating folder: Project (c6903b79c37d418aa5a767d9cb537ef4). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.908396] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35f67c1d-2014-4f22-bb55-876fe8d251e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.922133] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Created folder: Project (c6903b79c37d418aa5a767d9cb537ef4) in parent group-v558876. [ 671.922325] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Creating folder: Instances. Parent ref: group-v558908. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.923161] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c702ce40-bd50-4254-9e4f-6e6240bf5b49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.928208] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.930722] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ddac7c-6b39-50b7-7ee1-d4ac6e17cfa5, 'name': SearchDatastore_Task, 'duration_secs': 0.116166} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.931388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.931611] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.931834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.931978] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.932245] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.932419] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c399f9cd-fd6f-4c4f-8f31-f561591fc851 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.939414] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Created folder: Instances in parent group-v558908. [ 671.939414] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 671.939414] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 671.939414] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d099724-1418-43f4-bb3e-efbb0c6a2206 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.955610] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.955610] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 671.956397] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c584db5-f4c1-41ab-99fa-d792d26783f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.960262] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 671.960262] env[68244]: value = "task-2779921" [ 671.960262] env[68244]: _type = "Task" [ 671.960262] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.961476] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 671.961476] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f06cab-88f7-0872-e13e-5de81ed912a6" [ 671.961476] env[68244]: _type = "Task" [ 671.961476] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.968261] env[68244]: DEBUG nova.network.neutron [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updated VIF entry in instance network info cache for port 86448281-b3d4-4132-8a5e-1a366a1132e0. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.968599] env[68244]: DEBUG nova.network.neutron [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updating instance_info_cache with network_info: [{"id": "86448281-b3d4-4132-8a5e-1a366a1132e0", "address": "fa:16:3e:06:ab:d0", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86448281-b3", "ovs_interfaceid": "86448281-b3d4-4132-8a5e-1a366a1132e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.975692] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779921, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.979611] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f06cab-88f7-0872-e13e-5de81ed912a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.012494] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d73f87d2-41b3-4396-b5b5-932f8c6bf626 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.024828] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.024983] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquired lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.025158] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.077397] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779916, 'name': ReconfigVM_Task, 'duration_secs': 1.166301} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.077865] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfigured VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.078395] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35e32324-35a0-4049-b210-098c82e3f171 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.085047] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 672.085047] env[68244]: value = "task-2779922" [ 672.085047] env[68244]: _type = "Task" [ 672.085047] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.093438] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779922, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.139946] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779918, 'name': ReconfigVM_Task, 'duration_secs': 0.27947} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.140324] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfigured VM instance instance-00000009 to attach disk [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.141042] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aecd2635-724b-4951-9ae5-27405b2280c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.148110] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 672.148110] env[68244]: value = "task-2779923" [ 672.148110] env[68244]: _type = "Task" [ 672.148110] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.160864] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779923, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.254884] env[68244]: DEBUG oslo_concurrency.lockutils [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.255179] env[68244]: DEBUG nova.compute.manager [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Received event network-vif-deleted-a9569b35-556f-4a24-9726-720578e04bf7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 672.255368] env[68244]: DEBUG nova.compute.manager [req-53ba5485-3dd6-4a08-983c-eef2856ca01a req-0a69c326-0791-4e20-be71-efb89e59e36a service nova] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Received event network-vif-deleted-a34fb069-2c07-4bff-b9e7-6f4cad7240ff {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 672.456748] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.472164] env[68244]: DEBUG oslo_concurrency.lockutils [req-368f129a-7a44-4e9b-a660-24cc6afcd64b req-1fd63529-4935-42b6-883f-2aeff09f6095 service nova] Releasing lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.479120] env[68244]: DEBUG oslo_vmware.api [None 
req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f06cab-88f7-0872-e13e-5de81ed912a6, 'name': SearchDatastore_Task, 'duration_secs': 0.012629} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.479411] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779921, 'name': CreateVM_Task, 'duration_secs': 0.358987} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.479975] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.480195] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f7af91-7fc2-4573-893e-6b9bf4d4f84b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.482806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.482973] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.483284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.483524] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45929fd5-4456-4f18-9397-b248028cc3f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.487805] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 672.487805] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528c5079-4229-d663-e678-2c6f38e22079" [ 672.487805] env[68244]: _type = "Task" [ 672.487805] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.489382] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 672.489382] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529ad3a8-c1c6-e718-07ec-7f98e3f01616" [ 672.489382] env[68244]: _type = "Task" [ 672.489382] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.502251] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528c5079-4229-d663-e678-2c6f38e22079, 'name': SearchDatastore_Task, 'duration_secs': 0.010731} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.504933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.505113] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f/f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 672.505688] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529ad3a8-c1c6-e718-07ec-7f98e3f01616, 'name': SearchDatastore_Task, 'duration_secs': 0.010681} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.505889] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38fd901d-8a2e-4a02-9b4a-da518e466f3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.507781] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.507994] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.508226] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.508370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.508538] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.508773] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddc46e4c-b92a-4974-a265-51c46fe7f18e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.519124] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 59b0dd89-0093-4e50-9428-8db5c7fd429d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.520111] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.520111] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 672.520810] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 672.520810] env[68244]: value = "task-2779924" [ 672.520810] env[68244]: _type = "Task" [ 672.520810] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.521031] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba8182b9-e4ae-4ecf-9325-328eb8165e7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.531353] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 672.531353] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525406ef-97e3-f2f1-1a76-b245fd4905e6" [ 672.531353] env[68244]: _type = "Task" [ 672.531353] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.533922] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.543008] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525406ef-97e3-f2f1-1a76-b245fd4905e6, 'name': SearchDatastore_Task, 'duration_secs': 0.00972} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.543756] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-406cb9d5-1d46-4230-a322-e3ad100c6b5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.548813] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 672.548813] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267a591-f28d-c8e7-e8b2-9304eb0bf401" [ 672.548813] env[68244]: _type = "Task" [ 672.548813] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.556752] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267a591-f28d-c8e7-e8b2-9304eb0bf401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.592684] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.601632] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779922, 'name': Rename_Task, 'duration_secs': 0.151694} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.601632] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.601632] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf2b4c18-b99c-4f78-8217-6dbffb5c0ea2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.606709] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 672.606709] env[68244]: value = "task-2779925" [ 672.606709] env[68244]: _type = "Task" [ 672.606709] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.620910] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779925, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.658781] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779923, 'name': Rename_Task, 'duration_secs': 0.197814} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.662652] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.662652] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-567f557c-eb88-4d49-ab94-55e9e1e318dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.667819] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 672.667819] env[68244]: value = "task-2779926" [ 672.667819] env[68244]: _type = "Task" [ 672.667819] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.676645] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779926, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.729362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "aebd1200-ae52-4537-a677-24b57b581517" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.729640] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.729839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "aebd1200-ae52-4537-a677-24b57b581517-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.730029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.730203] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.732768] env[68244]: INFO nova.compute.manager [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Terminating instance [ 672.778991] env[68244]: DEBUG nova.compute.manager [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Received event network-changed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 672.779286] env[68244]: DEBUG nova.compute.manager [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Refreshing instance network info cache due to event network-changed. 
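The terminate path above takes two locks: a long-lived lock named after the instance UUID around the whole do_terminate_instance, and a short-lived "<uuid>-events" lock held only while pending external events are cleared. A rough sketch of that two-level pattern follows; it is an assumed simplification, not the ComputeManager code.

    from oslo_concurrency import lockutils

    pending_events = {}  # instance uuid -> list of pending external event names

    def clear_events_for_instance(uuid):
        with lockutils.lock(f"{uuid}-events"):
            return pending_events.pop(uuid, [])

    def do_terminate_instance(uuid):
        with lockutils.lock(uuid):
            dropped = clear_events_for_instance(uuid)
            print(f"Terminating instance {uuid}; dropped events: {dropped}")

    do_terminate_instance("aebd1200-ae52-4537-a677-24b57b581517")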
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 672.780157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] Acquiring lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.780379] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] Acquired lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.780601] env[68244]: DEBUG nova.network.neutron [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.841558] env[68244]: DEBUG nova.network.neutron [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updating instance_info_cache with network_info: [{"id": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "address": "fa:16:3e:9b:b0:11", "network": {"id": "aa8ee9c8-b278-4d98-a636-22113f2660a3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1003260700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2274129c89f94fdfbab47ca11d05db0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07f522b-44", "ovs_interfaceid": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.027010] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance aa7c6967-cd55-47fc-a2f5-db6e8d2e0307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.027217] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 673.027402] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 673.040271] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779924, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.064688] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5267a591-f28d-c8e7-e8b2-9304eb0bf401, 'name': SearchDatastore_Task, 'duration_secs': 0.008014} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.065448] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.065448] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d81bdefa-9c23-413b-9670-bbb2139084f7/d81bdefa-9c23-413b-9670-bbb2139084f7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.065766] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba389ba9-cfa6-4122-ae70-a48fa960165a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.075906] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 673.075906] env[68244]: value = "task-2779927" [ 673.075906] env[68244]: _type = "Task" [ 673.075906] env[68244]: } to complete. 
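The "Final resource view" above (used_ram=2624MB, used_disk=11GB, used_vcpus=11) is consistent with eleven allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} like the ones being healed earlier, plus a 512 MB host memory reservation (the same figure the inventory reports later in this trace). A quick check of that arithmetic:

    instances = 11
    alloc = {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}   # per-instance allocation
    reserved_ram_mb = 512                                  # assumed host reservation

    used_ram = reserved_ram_mb + instances * alloc["MEMORY_MB"]   # 2624 MB
    used_disk = instances * alloc["DISK_GB"]                      # 11 GB
    used_vcpus = instances * alloc["VCPU"]                        # 11
    print(used_ram, used_disk, used_vcpus)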
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.088194] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.121494] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779925, 'name': PowerOnVM_Task} progress is 76%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.186079] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779926, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.241415] env[68244]: DEBUG nova.compute.manager [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 673.241646] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 673.242674] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7487e8ae-666f-43fc-8020-0772c979205a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.256128] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 673.257456] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e282639b-affd-405f-ae5c-428a7aaee875 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.264656] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 673.264656] env[68244]: value = "task-2779928" [ 673.264656] env[68244]: _type = "Task" [ 673.264656] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.276319] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779928, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.348663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Releasing lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.348991] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Instance network_info: |[{"id": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "address": "fa:16:3e:9b:b0:11", "network": {"id": "aa8ee9c8-b278-4d98-a636-22113f2660a3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1003260700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2274129c89f94fdfbab47ca11d05db0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07f522b-44", "ovs_interfaceid": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 673.352075] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:b0:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f969bd9-e040-4b9b-85b2-7c61231584ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a07f522b-44ee-4a87-ac21-b5407bf48ff2', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.360153] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Creating folder: Project (2274129c89f94fdfbab47ca11d05db0b). Parent ref: group-v558876. 
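The "Instance VIF info" record above is derived from the Neutron port logged in the same line: network_name from the bridge, mac_address from the port address, and an OpaqueNetwork reference built from the nsx-logical-switch-id. The snippet below reproduces just that field selection from the logged values; everything else in the real build path is omitted.

    # Values copied from the Neutron port logged above.
    vif = {
        "id": "a07f522b-44ee-4a87-ac21-b5407bf48ff2",
        "address": "fa:16:3e:9b:b0:11",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad"},
    }

    vif_info = {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }
    print(vif_info)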
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.360744] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cf7b2b2-881d-4a47-badb-b623c08d6712 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.374928] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Created folder: Project (2274129c89f94fdfbab47ca11d05db0b) in parent group-v558876. [ 673.375154] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Creating folder: Instances. Parent ref: group-v558911. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.378346] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7abb5a4-b8df-4ac8-bb2e-b6c5d0552b8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.388411] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Created folder: Instances in parent group-v558911. [ 673.388587] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.389826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 673.389826] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d623aa9-8263-4419-87a4-e1cbab7e178a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.416038] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.416038] env[68244]: value = "task-2779931" [ 673.416038] env[68244]: _type = "Task" [ 673.416038] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.426541] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779931, 'name': CreateVM_Task} progress is 0%. 
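The loopingcall record above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") indicates the VM-creation call is wrapped so it can be re-invoked until it returns. The generic wrapper below is only an assumed stand-in for that behaviour, not the oslo.service implementation.

    import time

    def retry_until_returns(func, attempts=3, interval=1.0):
        # Generic stand-in: call func until it returns, sleeping between tries.
        last_exc = None
        for _ in range(attempts):
            try:
                return func()
            except Exception as exc:   # the real wrapper filters exception types
                last_exc = exc
                time.sleep(interval)
        raise last_exc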
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.428620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8849ee-0b89-4c65-bb07-f94064260050 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.441287] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de16aa60-719c-47f1-976f-7678e36b5aed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.484476] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3221322f-b166-4da9-bc38-8f1a67ff33b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.493748] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463c5a59-fe1f-4fa0-a1df-8e44fcef041f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.509832] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.541739] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579169} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.542781] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f/f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.543103] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.543532] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-963a9195-4bdb-4ed0-a029-ab48c9ced5eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.552162] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 673.552162] env[68244]: value = "task-2779932" [ 673.552162] env[68244]: _type = "Task" [ 673.552162] env[68244]: } to complete. 
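"Extending root virtual disk to 1048576" above is consistent with a 1 GB flavor root disk expressed in kilobytes (the vSphere extend-disk call takes its new capacity in KB), which also matches the DISK_GB: 1 allocations seen earlier:

    root_gb = 1                          # flavor root disk size (assumed)
    new_capacity_kb = root_gb * 1024 * 1024
    assert new_capacity_kb == 1048576    # the figure in the records above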
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.561653] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779932, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.588283] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502315} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.589929] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d81bdefa-9c23-413b-9670-bbb2139084f7/d81bdefa-9c23-413b-9670-bbb2139084f7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.590166] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.593099] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce8b283f-fd13-44e1-918a-8d193ee180bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.596152] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.596497] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.601416] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 673.601416] env[68244]: value = "task-2779933" [ 673.601416] env[68244]: _type = "Task" [ 673.601416] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.610702] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.619751] env[68244]: DEBUG oslo_vmware.api [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779925, 'name': PowerOnVM_Task, 'duration_secs': 0.727109} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.620068] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 673.620318] env[68244]: DEBUG nova.compute.manager [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 673.621092] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47b758a-f361-40c4-b879-c24b57ae07bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.680253] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779926, 'name': PowerOnVM_Task} progress is 100%. 
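Taken together, the interleaved task records above trace the tail of a spawn: copy the cached vmdk, extend the root disk, attach it via a reconfigure, rename the VM, then power it on and re-read the power state. The ordering below is assembled from those task names and is a rough sketch, not the actual vmops flow.

    def spawn_from_cached_image(instance_uuid, root_gb):
        steps = [
            ("CopyVirtualDisk_Task", "copy the cached vmdk into the instance directory"),
            ("ExtendVirtualDisk_Task", f"extend the root disk to {root_gb * 1024 * 1024} KB"),
            ("ReconfigVM_Task", "attach the copied vmdk to the VM"),
            ("Rename_Task", "rename the VM to the instance UUID"),
            ("PowerOnVM_Task", "power on, then re-read the power state"),
        ]
        for task_name, action in steps:
            # Each step issues the named vCenter task and polls it to completion.
            print(f"{task_name}: {action}")

    spawn_from_cached_image("d81bdefa-9c23-413b-9670-bbb2139084f7", root_gb=1)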
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.756362] env[68244]: DEBUG nova.network.neutron [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Updating instance_info_cache with network_info: [{"id": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "address": "fa:16:3e:3e:db:ab", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap455f4a3c-bc", "ovs_interfaceid": "455f4a3c-bc0d-49b8-9c1f-685ca84e33a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.775636] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779928, 'name': PowerOffVM_Task, 'duration_secs': 0.45928} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.776225] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 673.776407] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 673.776665] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d90e4ee1-9099-49ea-a8bf-623b1be67d6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.843158] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 673.843158] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 673.843158] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Deleting the datastore file [datastore2] aebd1200-ae52-4537-a677-24b57b581517 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.843158] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cec1afb-109b-473a-8fe1-98ccd5c33b52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.855523] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for the task: (returnval){ [ 673.855523] env[68244]: value = "task-2779935" [ 673.855523] env[68244]: _type = "Task" [ 673.855523] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.865725] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.925856] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779931, 'name': CreateVM_Task} progress is 25%. 
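The teardown of instance aebd1200-... above runs the opposite sequence: power off, unregister, then delete the instance directory from the datastore. A condensed stand-in follows; the path helper only mirrors the "[datastore2] <uuid>" form in the log.

    def ds_path(datastore, *parts):
        # Mirrors the "[datastore2] <uuid>" paths in the log.
        return f"[{datastore}] " + "/".join(parts)

    def destroy_instance(instance_uuid):
        print("PowerOffVM_Task")                       # task-2779928 above
        print("UnregisterVM")
        print("DeleteDatastoreFile_Task on", ds_path("datastore2", instance_uuid))

    destroy_instance("aebd1200-ae52-4537-a677-24b57b581517")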
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.018022] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.063660] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131443} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.063958] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.065956] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b68e2b-9303-4415-8e67-272f5bb336e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.072275] env[68244]: DEBUG nova.compute.manager [None req-0d01168c-d9c4-4fae-b148-9ee557ed0a43 tempest-ServerDiagnosticsV248Test-68488678 tempest-ServerDiagnosticsV248Test-68488678-project-admin] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.088380] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c156d2a0-e535-41af-a3eb-136b3bbef5f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.102322] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f/f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.103779] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff62c7c1-e9f9-440e-897d-0bd2100ebf16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.126116] env[68244]: INFO nova.compute.manager [None req-0d01168c-d9c4-4fae-b148-9ee557ed0a43 tempest-ServerDiagnosticsV248Test-68488678 tempest-ServerDiagnosticsV248Test-68488678-project-admin] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Retrieving diagnostics [ 674.127134] env[68244]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9bd1bf-0f75-4435-adf9-77ca9f61ed73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.134904] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 674.134904] env[68244]: value = "task-2779936" [ 674.134904] env[68244]: _type = "Task" [ 674.134904] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.139037] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.209354} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.140660] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.148413] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bbf842-c2b2-4102-8f37-e450bebfeebb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.151286] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.200605] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779936, 'name': ReconfigVM_Task} progress is 14%. 
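The inventory reported a few records back (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) translates into the capacity placement can allocate against, assuming the usual (total - reserved) * allocation_ratio formula:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0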
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.210024] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] d81bdefa-9c23-413b-9670-bbb2139084f7/d81bdefa-9c23-413b-9670-bbb2139084f7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.211071] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8004fb42-c03a-473e-90c6-321e823aa28d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.231654] env[68244]: DEBUG oslo_vmware.api [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2779926, 'name': PowerOnVM_Task, 'duration_secs': 1.025205} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.232275] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.232478] env[68244]: INFO nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 11.36 seconds to spawn the instance on the hypervisor. [ 674.232674] env[68244]: DEBUG nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.233502] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e4e53d-6a33-49a3-9277-140a11403c1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.237273] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 674.237273] env[68244]: value = "task-2779937" [ 674.237273] env[68244]: _type = "Task" [ 674.237273] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.250509] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779937, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.259640] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f228e9e6-11e1-4436-8515-180b3109c63d tempest-ServerExternalEventsTest-1217950267 tempest-ServerExternalEventsTest-1217950267-project] Releasing lock "refresh_cache-23f2ad6c-ea98-4a32-a79a-75cec6fc925e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.365860] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.429788] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779931, 'name': CreateVM_Task, 'duration_secs': 0.849733} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.429788] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 674.429788] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.429788] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.429788] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 674.429963] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687cf809-899b-449e-8e75-a00bd858f7d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.433988] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 674.433988] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52898a42-2dc5-332e-1407-980dd160ea01" [ 674.433988] env[68244]: _type = "Task" [ 674.433988] env[68244]: } to complete. 
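After CreateVM_Task completes above, the driver re-takes the image-cache lock and issues another SearchDatastore_Task against devstack-image-cache_base before deciding whether the cached disk can be reused. A hedged sketch of that check; search_datastore is a hypothetical stand-in for the HostDatastoreBrowser call.

    from oslo_concurrency import lockutils

    CACHE_ROOT = "[datastore2] devstack-image-cache_base"

    def cached_vmdk_available(search_datastore, image_id):
        # search_datastore stands in for HostDatastoreBrowser.SearchDatastore_Task.
        with lockutils.lock(f"{CACHE_ROOT}/{image_id}"):
            return search_datastore(f"{CACHE_ROOT}/{image_id}", f"{image_id}.vmdk")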
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.442246] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52898a42-2dc5-332e-1407-980dd160ea01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.523417] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 674.523600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.122s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.523895] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.498s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.525506] env[68244]: INFO nova.compute.claims [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.555470] env[68244]: DEBUG nova.compute.manager [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Received event network-vif-plugged-a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 674.555691] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.556018] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.556018] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.556124] env[68244]: DEBUG nova.compute.manager [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] No waiting events found dispatching network-vif-plugged-a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.556325] env[68244]: WARNING nova.compute.manager [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Received unexpected event network-vif-plugged-a07f522b-44ee-4a87-ac21-b5407bf48ff2 for instance with vm_state building and task_state spawning. [ 674.556443] env[68244]: DEBUG nova.compute.manager [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Received event network-changed-a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 674.556598] env[68244]: DEBUG nova.compute.manager [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Refreshing instance network info cache due to event network-changed-a07f522b-44ee-4a87-ac21-b5407bf48ff2. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 674.556798] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Acquiring lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.556939] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Acquired lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.557180] env[68244]: DEBUG nova.network.neutron [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Refreshing network info cache for port a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.598452] env[68244]: DEBUG nova.compute.manager [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Received event network-vif-plugged-e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 674.598721] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Acquiring lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.598864] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.599063] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.599215] env[68244]: DEBUG nova.compute.manager [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] No waiting events found dispatching network-vif-plugged-e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.599408] env[68244]: WARNING nova.compute.manager [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Received unexpected event network-vif-plugged-e39c0e00-ee59-4d80-b276-18ca3d5cb12f for instance with vm_state building and task_state spawning. [ 674.599566] env[68244]: DEBUG nova.compute.manager [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Received event network-changed-e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 674.599825] env[68244]: DEBUG nova.compute.manager [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Refreshing instance network info cache due to event network-changed-e39c0e00-ee59-4d80-b276-18ca3d5cb12f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 674.599898] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Acquiring lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.601926] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Acquired lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.601926] env[68244]: DEBUG nova.network.neutron [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Refreshing network info cache for port e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.651423] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779936, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.755163] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779937, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.764464] env[68244]: INFO nova.compute.manager [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 28.61 seconds to build instance. [ 674.867541] env[68244]: DEBUG oslo_vmware.api [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Task: {'id': task-2779935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.779111} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.867647] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 674.867834] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 674.868390] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 674.868390] env[68244]: INFO nova.compute.manager [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [instance: aebd1200-ae52-4537-a677-24b57b581517] Took 1.63 seconds to destroy the instance on the hypervisor. [ 674.868521] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.868597] env[68244]: DEBUG nova.compute.manager [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 674.868678] env[68244]: DEBUG nova.network.neutron [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 674.944199] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52898a42-2dc5-332e-1407-980dd160ea01, 'name': SearchDatastore_Task, 'duration_secs': 0.041502} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.944537] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.944769] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 674.945373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.945373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.945373] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.946210] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49dd4e4a-e614-4a38-8c0a-f3a97cf65a0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.957038] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de 
tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.957038] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 674.957038] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31137bc8-5305-400d-8ac0-95392bf121bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.964516] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 674.964516] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5225fa24-d350-6194-7948-9bb303c34054" [ 674.964516] env[68244]: _type = "Task" [ 674.964516] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.971447] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5225fa24-d350-6194-7948-9bb303c34054, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.033966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.034423] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.034860] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.035224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.035733] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.044388] env[68244]: INFO nova.compute.manager [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Terminating instance [ 675.154321] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779936, 'name': ReconfigVM_Task, 'duration_secs': 0.770004} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.154605] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfigured VM instance instance-0000000a to attach disk [datastore2] f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f/f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.155744] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5506d07b-d1d8-473e-838f-8a54b2e01bbb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.162440] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 675.162440] env[68244]: value = "task-2779938" [ 675.162440] env[68244]: _type = "Task" [ 675.162440] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.170797] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779938, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.248306] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779937, 'name': ReconfigVM_Task, 'duration_secs': 0.64412} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.252096] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Reconfigured VM instance instance-0000000b to attach disk [datastore2] d81bdefa-9c23-413b-9670-bbb2139084f7/d81bdefa-9c23-413b-9670-bbb2139084f7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.252096] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db847df0-f9c2-4c45-bdb1-bf0f30ed4765 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.259564] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 675.259564] env[68244]: value = "task-2779939" [ 675.259564] env[68244]: _type = "Task" [ 675.259564] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.266045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6bfcbdaa-13e9-446e-8c28-1fedb91bb893 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.125s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.272418] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779939, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.350305] env[68244]: DEBUG nova.network.neutron [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updated VIF entry in instance network info cache for port e39c0e00-ee59-4d80-b276-18ca3d5cb12f. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.350674] env[68244]: DEBUG nova.network.neutron [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updating instance_info_cache with network_info: [{"id": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "address": "fa:16:3e:ef:10:b5", "network": {"id": "17c063d9-ba44-409b-b637-59552bc5d906", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2127301711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6903b79c37d418aa5a767d9cb537ef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape39c0e00-ee", "ovs_interfaceid": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.474216] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5225fa24-d350-6194-7948-9bb303c34054, 'name': SearchDatastore_Task, 'duration_secs': 0.027344} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.474983] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98264b9a-2c71-4129-a09f-15a277e3363d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.480412] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 675.480412] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217d960-9375-053c-48df-9b019f916a95" [ 675.480412] env[68244]: _type = "Task" [ 675.480412] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.487583] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217d960-9375-053c-48df-9b019f916a95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.523050] env[68244]: DEBUG nova.network.neutron [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updated VIF entry in instance network info cache for port a07f522b-44ee-4a87-ac21-b5407bf48ff2. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.523410] env[68244]: DEBUG nova.network.neutron [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updating instance_info_cache with network_info: [{"id": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "address": "fa:16:3e:9b:b0:11", "network": {"id": "aa8ee9c8-b278-4d98-a636-22113f2660a3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1003260700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2274129c89f94fdfbab47ca11d05db0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07f522b-44", "ovs_interfaceid": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.548610] env[68244]: DEBUG nova.compute.manager [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Start destroying the 
instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 675.548836] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 675.549840] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac5baa7-3b49-40cd-9427-ee19df15dfff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.558988] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 675.559391] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad0192d0-630f-426c-bc6a-f648e2fd4796 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.565390] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 675.565390] env[68244]: value = "task-2779940" [ 675.565390] env[68244]: _type = "Task" [ 675.565390] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.576878] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.677608] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779938, 'name': Rename_Task, 'duration_secs': 0.217784} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.677876] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 675.678201] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1fa1c0-b547-4b03-acf7-aa8cf5cbd9e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.685961] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 675.685961] env[68244]: value = "task-2779941" [ 675.685961] env[68244]: _type = "Task" [ 675.685961] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.693822] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779941, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.771763] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.775844] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779939, 'name': Rename_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.856024] env[68244]: DEBUG nova.network.neutron [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.857188] env[68244]: DEBUG oslo_concurrency.lockutils [req-65f1b982-5ac7-4d02-8c52-937c63758ef6 req-03ab00f7-3526-4aaa-990c-aa768ae88c57 service nova] Releasing lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.899791] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d11853-54a5-4f03-87d0-27c0e5fea9e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.908486] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382b4f75-20ca-4834-b0e8-5b6bb25f85ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.941977] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8ee630-7c28-42e9-9fa8-cb9715f67934 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.951140] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a738cd20-205e-4b42-8be3-a93866387f18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.964592] env[68244]: DEBUG nova.compute.provider_tree [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.991314] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217d960-9375-053c-48df-9b019f916a95, 'name': SearchDatastore_Task, 'duration_secs': 0.031804} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.991631] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.991912] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c/8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.992234] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bfaebbd-1b88-4c72-bdbb-ac6ab650d91c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.999084] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 675.999084] env[68244]: value = "task-2779942" [ 675.999084] env[68244]: _type = "Task" [ 675.999084] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.008300] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.026830] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f28e95e-82bb-466f-a744-172dc75818d9 req-25461edd-de9f-4824-901a-7429f24dd268 service nova] Releasing lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.076023] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779940, 'name': PowerOffVM_Task, 'duration_secs': 0.221654} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.077663] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 676.077663] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 676.077663] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-324d988b-85e7-46ce-9308-9a1e582cec83 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.133845] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 676.134145] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 676.134335] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Deleting the datastore file [datastore2] 23f2ad6c-ea98-4a32-a79a-75cec6fc925e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 676.138246] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44183206-e018-47a2-b309-d54c2a4c3316 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.145195] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for the task: (returnval){ [ 676.145195] env[68244]: value = "task-2779944" [ 676.145195] env[68244]: _type = "Task" [ 676.145195] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.156069] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.195656] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.286363] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779939, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.287398] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.287990] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.288681] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.289059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.289379] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.291726] env[68244]: INFO nova.compute.manager [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Terminating instance [ 676.300496] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc 
tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.360965] env[68244]: INFO nova.compute.manager [-] [instance: aebd1200-ae52-4537-a677-24b57b581517] Took 1.49 seconds to deallocate network for instance. [ 676.469594] env[68244]: DEBUG nova.scheduler.client.report [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.509611] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487472} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.509882] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c/8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.510117] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.510523] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab0ae9e4-8c39-41d8-af1c-99b36670e390 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.517950] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 676.517950] env[68244]: value = "task-2779945" [ 676.517950] env[68244]: _type = "Task" [ 676.517950] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.526559] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.657746] env[68244]: DEBUG oslo_vmware.api [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Task: {'id': task-2779944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.399024} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.658881] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 676.658881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 676.658881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.658881] env[68244]: INFO nova.compute.manager [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 676.659075] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 676.659261] env[68244]: DEBUG nova.compute.manager [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 676.659349] env[68244]: DEBUG nova.network.neutron [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.696745] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779941, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.773879] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779939, 'name': Rename_Task, 'duration_secs': 1.26608} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.774290] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.774662] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52f32684-d392-4775-a5fb-dbf5541d6a99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.781564] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 676.781564] env[68244]: value = "task-2779946" [ 676.781564] env[68244]: _type = "Task" [ 676.781564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.799049] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.799049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "refresh_cache-ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.799049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquired lock "refresh_cache-ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.799186] env[68244]: DEBUG nova.network.neutron [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.868616] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.977025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.977025] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.978412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.720s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.980693] env[68244]: INFO nova.compute.claims [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 677.030344] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112224} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.030629] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.031506] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2a45a8-f842-4a5e-97bc-90cafcda920d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.060200] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c/8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.061056] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04f068ff-47a4-4354-980e-a27836505e15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.085020] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 677.085020] env[68244]: value = "task-2779947" [ 677.085020] env[68244]: _type = "Task" [ 677.085020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.092746] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779947, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.177337] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.177565] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.196668] env[68244]: DEBUG oslo_vmware.api [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2779941, 'name': PowerOnVM_Task, 'duration_secs': 1.065632} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.196907] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 677.197249] env[68244]: INFO nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Took 11.89 seconds to spawn the instance on the hypervisor. [ 677.197464] env[68244]: DEBUG nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.198248] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63b084a-2006-437a-bd67-78ae1f84e2e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.292343] env[68244]: INFO nova.compute.manager [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Rebuilding instance [ 677.294448] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779946, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.343738] env[68244]: DEBUG nova.compute.manager [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.344632] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107f0c21-5d74-46bc-92d8-da421a5ef51b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.411808] env[68244]: DEBUG nova.network.neutron [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.485860] env[68244]: DEBUG nova.compute.utils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 677.489508] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.489682] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.526968] env[68244]: DEBUG nova.policy [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3449af18f8384e0382863f3b2d2357ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab15ba4f32a45d1832ce9d831d62f34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 677.549324] env[68244]: DEBUG nova.network.neutron [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.571953] env[68244]: DEBUG nova.network.neutron [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Updating instance_info_cache with network_info: [] {{(pid=68244) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.593180] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779947, 'name': ReconfigVM_Task, 'duration_secs': 0.26046} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.593451] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c/8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.594109] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48412a31-faaa-4eb6-a4a1-76f66e5b6dc1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.600543] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 677.600543] env[68244]: value = "task-2779948" [ 677.600543] env[68244]: _type = "Task" [ 677.600543] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.609251] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779948, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.717357] env[68244]: INFO nova.compute.manager [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Took 26.24 seconds to build instance. [ 677.792626] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779946, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.806435] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Successfully created port: 57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.990036] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 678.053101] env[68244]: INFO nova.compute.manager [-] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Took 1.39 seconds to deallocate network for instance. [ 678.074058] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Releasing lock "refresh_cache-ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.074233] env[68244]: DEBUG nova.compute.manager [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 678.074437] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.076810] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e0e740-25ce-4e2f-8bdc-947c7d95fe67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.087792] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.088062] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3dcfd79a-521c-4d08-ad14-b1b8446150b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.096606] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 678.096606] env[68244]: value = "task-2779949" [ 678.096606] env[68244]: _type = "Task" [ 678.096606] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.112164] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.117588] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779948, 'name': Rename_Task, 'duration_secs': 0.130115} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.117588] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.117967] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7000dd36-ef77-4d7a-aa70-713feb768f88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.130698] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 678.130698] env[68244]: value = "task-2779950" [ 678.130698] env[68244]: _type = "Task" [ 678.130698] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.143411] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779950, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.219996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7ecd533d-5093-4d59-9509-360063765454 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.752s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.294756] env[68244]: DEBUG oslo_vmware.api [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2779946, 'name': PowerOnVM_Task, 'duration_secs': 1.145596} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.295457] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.295457] env[68244]: INFO nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Took 10.51 seconds to spawn the instance on the hypervisor. 
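The ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above, with their repeated "progress is N%" polling, all follow the same oslo.vmware invoke-then-wait pattern. A minimal sketch of that pattern against oslo.vmware's public VMwareAPISession API (not Nova's internal wrappers); the vCenter host, credentials, and managed-object id below are placeholders, not values taken from this log.

```python
# Sketch only: the invoke-a-*_Task-then-poll pattern behind the
# wait_for_task / _poll_task entries above. Host, credentials and the
# MoRef id are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',          # placeholder vCenter host
    'user', 'password',             # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)

# A real driver would obtain the VM reference from a PropertyCollector
# search; here a placeholder managed-object id is wrapped directly.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() issues the SOAP call and returns the Task reference ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and wait_for_task() polls it (the "progress is N%" lines) until it
# completes, returning the final TaskInfo, or raises on failure.
task_info = session.wait_for_task(task)
print(task_info.state)
```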
[ 678.295663] env[68244]: DEBUG nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.296427] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef1d432-1d17-4c2d-a405-3843275df341 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.310826] env[68244]: DEBUG nova.compute.manager [req-a6649e4b-7ff4-4e2d-8e19-8dec755f22fb req-430f05b4-267a-46d6-9948-371ecd6ac801 service nova] [instance: aebd1200-ae52-4537-a677-24b57b581517] Received event network-vif-deleted-f1165d65-1146-43b0-8b0b-413cebb150aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 678.358096] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.361178] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5e39a8-c88c-4237-b10e-c6be1dc2ec78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.370996] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 678.370996] env[68244]: value = "task-2779951" [ 678.370996] env[68244]: _type = "Task" [ 678.370996] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.387359] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779951, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.403889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148ed2f6-3715-4543-a97f-57fe4821c9ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.412061] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1422b6-dc72-4cca-8f47-3ec8b932fd64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.444553] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6746a64-c868-4a95-9e3c-6d3b34ee6b8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.451857] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1efd1d4-69b6-4825-b729-fcef03bbd072 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.466766] env[68244]: DEBUG nova.compute.provider_tree [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.564768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.609237] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779949, 'name': PowerOffVM_Task, 'duration_secs': 0.213629} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.609707] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 678.609990] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 678.610345] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3b1f6ce-6873-4878-b84c-9d8ca90857c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.636438] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 678.636686] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 678.636928] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Deleting the datastore file [datastore2] ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 678.637644] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81f2a7fb-7206-4997-a2e7-2d34bb151609 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.643116] env[68244]: DEBUG oslo_vmware.api [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2779950, 'name': PowerOnVM_Task, 'duration_secs': 0.487442} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.643826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.644125] env[68244]: INFO nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Took 8.20 seconds to spawn the instance on the hypervisor. [ 678.644381] env[68244]: DEBUG nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.645234] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f53503e-0acf-405e-af20-61e269bca9ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.648912] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for the task: (returnval){ [ 678.648912] env[68244]: value = "task-2779953" [ 678.648912] env[68244]: _type = "Task" [ 678.648912] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.661847] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779953, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.727267] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 678.818609] env[68244]: INFO nova.compute.manager [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Took 19.56 seconds to build instance. [ 678.882687] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779951, 'name': PowerOffVM_Task, 'duration_secs': 0.130665} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.882988] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 678.883263] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.884076] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0ae7bf-ed10-4cb6-aec7-3a543c97d48d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.891745] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 678.891918] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17062b0c-8a24-4cbe-af49-3ecba41ea0bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.917975] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 678.917975] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 678.917975] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Deleting the datastore file [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 678.917975] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bc0cd65-9054-48b9-a61b-b7996dad5714 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.927804] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 678.927804] env[68244]: value = "task-2779955" [ 678.927804] env[68244]: _type = "Task" [ 678.927804] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.933599] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.972461] env[68244]: DEBUG nova.scheduler.client.report [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.003308] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 679.034352] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:20:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1630372366',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-750213850',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 679.034583] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.034767] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 679.034918] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 
tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.035843] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 679.035843] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 679.035843] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 679.035843] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 679.036387] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 679.036387] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 679.036387] env[68244]: DEBUG nova.virt.hardware [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 679.037449] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8244d6-0c54-4d52-98f6-c7df2c99c631 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.045698] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a4148e-2acb-4195-89b1-b67f63f062a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.158978] env[68244]: DEBUG oslo_vmware.api [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 
tempest-ServerDiagnosticsV248Test-844497569-project-member] Task: {'id': task-2779953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211889} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.159516] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.159716] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.159896] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.160328] env[68244]: INFO nova.compute.manager [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Took 1.09 seconds to destroy the instance on the hypervisor. [ 679.160616] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.160828] env[68244]: DEBUG nova.compute.manager [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 679.160924] env[68244]: DEBUG nova.network.neutron [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.172078] env[68244]: INFO nova.compute.manager [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Took 19.76 seconds to build instance. 
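The recurring "Acquiring lock ... acquired ... waited N.NNNs" and 'Lock ... "released" ... held N.NNNs' lines are emitted by oslo.concurrency's lockutils; Nova reaches it through its own synchronized wrappers. A minimal sketch of that locking pattern using the public lockutils API directly; the lock name and the guarded functions are illustrative only.

```python
# Sketch only: the locking pattern behind the lockutils acquire/release
# entries. Lock name and the guarded functions are illustrative.
import time

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Only one thread/greenthread per process runs this at a time; the
    # decorator logs how long callers waited for and held the lock.
    time.sleep(0.1)

def update_usage():
    # Equivalent context-manager form of the same in-process lock.
    with lockutils.lock('compute_resources'):
        time.sleep(0.1)

claim_resources()
update_usage()
```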
[ 679.251611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.311755] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Successfully updated port: 57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.321708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75d63b37-240b-4dc3-9841-95eee1fdfcd7 tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.278s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.344724] env[68244]: DEBUG nova.network.neutron [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.436052] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274873} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.436052] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.436052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.436052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.482302] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.482847] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 679.485830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.130s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.487409] env[68244]: INFO nova.compute.claims [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.676563] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64780c1f-aa5f-4114-9980-4756ed6dd9de tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.921s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.821029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.821029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.821029] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.828239] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.849797] env[68244]: DEBUG nova.network.neutron [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.994153] env[68244]: DEBUG nova.compute.utils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 679.999533] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 679.999698] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 680.093365] env[68244]: DEBUG nova.policy [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8c3f90f344a45c1861ef7fb32d4bfd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcd37f739a7545e595cd423d24e810bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.173149] env[68244]: DEBUG nova.compute.manager [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Received event network-vif-plugged-57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 680.173380] env[68244]: DEBUG oslo_concurrency.lockutils [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] Acquiring lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.173585] env[68244]: DEBUG oslo_concurrency.lockutils [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.173874] env[68244]: DEBUG oslo_concurrency.lockutils [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" "released" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.174141] env[68244]: DEBUG nova.compute.manager [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] No waiting events found dispatching network-vif-plugged-57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 680.174441] env[68244]: WARNING nova.compute.manager [req-f43fe9bf-5584-40de-951c-bbab676c0e11 req-e6b7b605-66fe-4d8c-bf3a-1de24a735eb4 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Received unexpected event network-vif-plugged-57f7dda3-98ee-46c7-871d-37b0add34372 for instance with vm_state building and task_state spawning. [ 680.180609] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.350239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.354933] env[68244]: INFO nova.compute.manager [-] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Took 1.19 seconds to deallocate network for instance. [ 680.364962] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.475521] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.475744] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.475904] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.476349] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.476349] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.476626] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.476713] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.476837] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.477019] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d 
tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.477262] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.477442] env[68244]: DEBUG nova.virt.hardware [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.478335] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec0a8b2-54bb-46da-ac55-b1eea7dfa184 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.490818] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbfe176-4f83-4963-ac46-02d7661aeb56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.509508] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 680.514372] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.526367] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 680.527545] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.527780] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64dec635-061d-43f5-a56e-223ce301b1aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.548566] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.548566] env[68244]: value = "task-2779956" [ 680.548566] env[68244]: _type = "Task" [ 680.548566] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.556698] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779956, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.703557] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.754980] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Successfully created port: a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.869019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.904832] env[68244]: DEBUG nova.network.neutron [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updating instance_info_cache with network_info: [{"id": "57f7dda3-98ee-46c7-871d-37b0add34372", "address": "fa:16:3e:79:0a:05", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f7dda3-98", "ovs_interfaceid": "57f7dda3-98ee-46c7-871d-37b0add34372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.915208] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe031abe-34c4-46c4-acad-f598d9722179 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.922768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90b4b5b-6032-41a6-9cf8-48c21a9b8a0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.959495] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61753cd6-cd1d-4c55-873b-d384c9388756 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.969236] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e04971-b0e9-4e20-ab62-68761ce5b739 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.988057] env[68244]: DEBUG nova.compute.provider_tree [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.058615] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779956, 'name': CreateVM_Task, 'duration_secs': 0.352466} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.060193] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.060722] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.061010] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.061464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 681.061871] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e130b60-173e-4953-b8b7-64de1b0117cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.068133] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 681.068133] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e319cc-e60d-76c9-5d9a-10b51f29a774" [ 681.068133] env[68244]: _type = "Task" [ 681.068133] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.077145] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e319cc-e60d-76c9-5d9a-10b51f29a774, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.410045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.410045] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Instance network_info: |[{"id": "57f7dda3-98ee-46c7-871d-37b0add34372", "address": "fa:16:3e:79:0a:05", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f7dda3-98", "ovs_interfaceid": "57f7dda3-98ee-46c7-871d-37b0add34372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 681.410539] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:0a:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57f7dda3-98ee-46c7-871d-37b0add34372', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.419249] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Creating folder: Project (eab15ba4f32a45d1832ce9d831d62f34). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.419249] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e423ab6-5a2f-4c85-9e96-c9c16d5b1799 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.433130] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Created folder: Project (eab15ba4f32a45d1832ce9d831d62f34) in parent group-v558876. [ 681.433504] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Creating folder: Instances. Parent ref: group-v558915. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.434435] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1526c2de-29ae-42e1-b38f-9fcde89a4de5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.444424] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Created folder: Instances in parent group-v558915. [ 681.445171] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.445533] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 681.447201] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f34a98d-b8d3-455e-9395-39cab43bfa92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.471808] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 681.471808] env[68244]: value = "task-2779959" [ 681.471808] env[68244]: _type = "Task" [ 681.471808] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.479562] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779959, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.484055] env[68244]: DEBUG nova.compute.manager [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Received event network-vif-deleted-455f4a3c-bc0d-49b8-9c1f-685ca84e33a4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.484261] env[68244]: DEBUG nova.compute.manager [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Received event network-changed-3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.484417] env[68244]: DEBUG nova.compute.manager [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Refreshing instance network info cache due to event network-changed-3a4a896b-0463-43a3-8487-d50328142090. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 681.484625] env[68244]: DEBUG oslo_concurrency.lockutils [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.484920] env[68244]: DEBUG oslo_concurrency.lockutils [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.484920] env[68244]: DEBUG nova.network.neutron [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Refreshing network info cache for port 3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.492332] env[68244]: DEBUG nova.scheduler.client.report [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 681.535541] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 681.565303] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 681.565595] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.565837] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 681.566051] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.566257] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 681.566429] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 681.566651] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 681.566848] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 681.567033] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 681.568264] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 681.568264] env[68244]: DEBUG nova.virt.hardware [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 681.568374] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6947b119-7773-45b8-bfd8-30f75729e313 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.582914] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b10978-1368-4c4c-a0c0-c9411b6a9b8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.586860] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e319cc-e60d-76c9-5d9a-10b51f29a774, 'name': SearchDatastore_Task, 'duration_secs': 0.02767} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.587307] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.587375] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.587655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.587726] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.587882] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.588575] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a14e83dd-2e09-4216-95fd-647778b51e6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.605922] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.606019] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.606916] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d0f0878-95f9-400e-abac-9ef3e5fccd99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.611781] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 681.611781] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287031e-9310-c45a-82e6-64a1aa4e445a" [ 681.611781] env[68244]: _type = "Task" [ 681.611781] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.621219] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287031e-9310-c45a-82e6-64a1aa4e445a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.986184] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779959, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.994816] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.995873] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 681.998778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.795s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.998999] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.003538] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.692s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.005897] env[68244]: INFO nova.compute.claims [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.049209] env[68244]: INFO nova.scheduler.client.report [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Deleted allocations for instance 47330950-506d-41c7-b564-30f46a7025a7 [ 682.130196] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287031e-9310-c45a-82e6-64a1aa4e445a, 'name': SearchDatastore_Task, 'duration_secs': 0.014265} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.133320] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ba76e4d-0586-4f0f-8474-bde178147e2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.141976] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 682.141976] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520af7e0-81d0-42bd-f1cb-8068ac13110b" [ 682.141976] env[68244]: _type = "Task" [ 682.141976] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.148276] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520af7e0-81d0-42bd-f1cb-8068ac13110b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.280938] env[68244]: DEBUG nova.network.neutron [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updated VIF entry in instance network info cache for port 3a4a896b-0463-43a3-8487-d50328142090. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.281357] env[68244]: DEBUG nova.network.neutron [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.484290] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779959, 'name': CreateVM_Task, 'duration_secs': 0.568657} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.484557] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 682.485479] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.485479] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.486291] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 682.486291] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25001639-01da-4cf9-b6aa-f1d2c4737c3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.490712] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 682.490712] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e85d3f-1381-ad50-de6c-f6cf8b69d60c" [ 682.490712] env[68244]: _type = "Task" [ 682.490712] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.498686] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e85d3f-1381-ad50-de6c-f6cf8b69d60c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.514273] env[68244]: DEBUG nova.compute.utils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 682.518032] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 682.518204] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.557258] env[68244]: DEBUG nova.policy [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6af77f00c84d4e99bea878bc30dcc361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '821b99c053aa45b4b6b8fb09eb63aa73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 682.561840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-26d7a2fe-9ba1-439f-accb-88a6dfe84409 tempest-ImagesNegativeTestJSON-470959141 tempest-ImagesNegativeTestJSON-470959141-project-member] Lock "47330950-506d-41c7-b564-30f46a7025a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.071s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.649791] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520af7e0-81d0-42bd-f1cb-8068ac13110b, 'name': SearchDatastore_Task, 'duration_secs': 0.028618} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.650061] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.650316] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.650569] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41a83a3e-ef72-4715-8718-e6d70fa63e4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.658278] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 682.658278] env[68244]: value = "task-2779960" [ 682.658278] env[68244]: _type = "Task" [ 682.658278] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.669351] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779960, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.785440] env[68244]: DEBUG oslo_concurrency.lockutils [req-58a05536-251b-448a-8c03-c4fbf873dafd req-cf222ed4-58d7-4bba-a66a-95e3f451c334 service nova] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.830466] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Successfully created port: a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.001240] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e85d3f-1381-ad50-de6c-f6cf8b69d60c, 'name': SearchDatastore_Task, 'duration_secs': 0.033016} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.001574] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.001820] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.002070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.002221] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.002402] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.002705] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b78ffa16-f21f-4a35-8b47-d39951c488f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.012596] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.012714] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 683.013420] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ee9c44a-b741-4586-b4b9-1218cd159b25 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.022174] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.035111] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 683.035111] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fc41ca-6a09-f7d2-4b4a-86ae75aa9cfd" [ 683.035111] env[68244]: _type = "Task" [ 683.035111] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.047840] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fc41ca-6a09-f7d2-4b4a-86ae75aa9cfd, 'name': SearchDatastore_Task, 'duration_secs': 0.01025} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.051255] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c47d540-ab94-41de-b088-b21eecc4792c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.054982] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 683.054982] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e62ee-c544-8375-0f52-8b4bcc657b9d" [ 683.054982] env[68244]: _type = "Task" [ 683.054982] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.074411] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e62ee-c544-8375-0f52-8b4bcc657b9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.170220] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779960, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.187906] env[68244]: DEBUG nova.compute.manager [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 683.188912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ac0a44-1394-49c7-a7d8-20f69d44a63d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.239923] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Successfully updated port: a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.418660] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2d73bf-6511-4e82-8169-37fd374b192a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.427200] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff389ada-7d95-4f8d-9508-6fd43047da90 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.461477] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195165a1-f597-40f9-af9e-795c71d06cbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.469872] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94e0252-b860-4752-8f97-6458e0a0e4fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.485353] env[68244]: DEBUG nova.compute.provider_tree [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.565344] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e62ee-c544-8375-0f52-8b4bcc657b9d, 'name': SearchDatastore_Task, 'duration_secs': 0.055827} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.566276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.566537] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3776b39a-d10b-4068-8b4b-5dc25798e088/3776b39a-d10b-4068-8b4b-5dc25798e088.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 683.566785] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-353eabd1-c36b-42ca-93a8-aa59fe4a6acb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.575008] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 683.575008] env[68244]: value = "task-2779961" [ 683.575008] env[68244]: _type = "Task" [ 683.575008] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.582666] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.673048] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779960, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773827} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.673048] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.673048] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.673294] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb43bbc5-9920-4964-b5b7-438250a035b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.680257] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 683.680257] env[68244]: value = "task-2779962" [ 683.680257] env[68244]: _type = "Task" [ 683.680257] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.690808] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779962, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.702435] env[68244]: INFO nova.compute.manager [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] instance snapshotting [ 683.705019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0166dcd1-7d1e-4c3d-bcb0-60403b5bc525 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.724427] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd73830f-76b1-4bcf-8d8e-250d9a902c7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.743254] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.743401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.743439] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.989658] env[68244]: DEBUG nova.scheduler.client.report [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.034293] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.060207] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.061097] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.061097] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.061097] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.061097] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.061097] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.061397] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.061471] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.064084] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Got 1 possible 
topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.064084] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.064084] env[68244]: DEBUG nova.virt.hardware [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.064084] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10db828b-2bd7-4efb-92c3-f64e665eeec3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.071077] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828a72a4-0271-47ae-8d41-6888fcac870c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.096020] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779961, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.191019] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167842} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.192098] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 684.196024] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550c7ddf-d92b-4e5d-940e-877547aa4475 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.214631] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 684.216185] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27aeabf7-5998-42a0-a9e5-099f87d173b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.232703] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Received event network-changed-57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 684.232995] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Refreshing instance network info cache due to event network-changed-57f7dda3-98ee-46c7-871d-37b0add34372. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 684.236578] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquiring lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.236578] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquired lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.236578] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Refreshing network info cache for port 57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.237836] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 684.238751] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6eb99894-43a3-41ab-a92a-028aba3c3777 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.244580] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 684.244580] env[68244]: value = "task-2779963" [ 684.244580] env[68244]: _type = "Task" [ 684.244580] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.251669] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 684.251669] env[68244]: value = "task-2779964" [ 684.251669] env[68244]: _type = "Task" [ 684.251669] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.276485] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.277094] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779964, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.320323] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.499877] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.499877] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 684.503452] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.384s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.503452] env[68244]: DEBUG nova.objects.instance [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lazy-loading 'resources' on Instance uuid f48156b9-0316-4a9c-9cf0-9dd9d7a932c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 684.591179] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.89558} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.591529] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3776b39a-d10b-4068-8b4b-5dc25798e088/3776b39a-d10b-4068-8b4b-5dc25798e088.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 684.591799] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 684.592150] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4eed240d-6130-424d-a34a-efa43fce2f41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.603824] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 684.603824] env[68244]: value = "task-2779965" [ 684.603824] env[68244]: _type = "Task" [ 684.603824] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.613919] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779965, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.636153] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Successfully updated port: a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 684.716597] env[68244]: DEBUG nova.network.neutron [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Updating instance_info_cache with network_info: [{"id": "a8cb98b0-596a-4263-96fc-669e34e6e364", "address": "fa:16:3e:3e:29:3c", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8cb98b0-59", "ovs_interfaceid": "a8cb98b0-596a-4263-96fc-669e34e6e364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.759287] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.768804] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779964, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.943162] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Received event network-changed-a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 684.943162] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Refreshing instance network info cache due to event network-changed-a07f522b-44ee-4a87-ac21-b5407bf48ff2. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 684.945803] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Acquiring lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.945803] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Acquired lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.945803] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Refreshing network info cache for port a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.008202] env[68244]: DEBUG nova.compute.utils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 685.012770] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 685.013268] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.056228] env[68244]: DEBUG nova.policy [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e2b78ca269843a0a5541e44727d807b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf55a7bfa5948d1837855650c1c960b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 685.119314] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155986} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.119649] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 685.120666] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bedbd3c-bdc8-43d8-88b5-91993d41e65d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.145889] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 3776b39a-d10b-4068-8b4b-5dc25798e088/3776b39a-d10b-4068-8b4b-5dc25798e088.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 685.149083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.149298] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.149507] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.150610] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-beac1471-1a23-4953-a71b-b030992ce3f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.176494] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 685.176494] env[68244]: value = "task-2779966" [ 685.176494] env[68244]: _type = "Task" [ 685.176494] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.187422] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779966, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.217438] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updated VIF entry in instance network info cache for port 57f7dda3-98ee-46c7-871d-37b0add34372. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 685.217870] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updating instance_info_cache with network_info: [{"id": "57f7dda3-98ee-46c7-871d-37b0add34372", "address": "fa:16:3e:79:0a:05", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f7dda3-98", "ovs_interfaceid": "57f7dda3-98ee-46c7-871d-37b0add34372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.219233] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.219498] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Instance network_info: |[{"id": "a8cb98b0-596a-4263-96fc-669e34e6e364", "address": "fa:16:3e:3e:29:3c", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8cb98b0-59", "ovs_interfaceid": "a8cb98b0-596a-4263-96fc-669e34e6e364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.222576] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:29:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8cb98b0-596a-4263-96fc-669e34e6e364', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.233056] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Creating folder: Project (dcd37f739a7545e595cd423d24e810bf). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.233056] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73685111-a7d9-4f25-8a74-abea22ffb12c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.246626] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.251798] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Created folder: Project (dcd37f739a7545e595cd423d24e810bf) in parent group-v558876. [ 685.252147] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Creating folder: Instances. Parent ref: group-v558919. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.255907] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-209c81d4-bf02-411f-9d99-c6922cc30118 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.269192] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779963, 'name': ReconfigVM_Task, 'duration_secs': 0.739529} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.270959] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Reconfigured VM instance instance-00000006 to attach disk [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23/c662b964-abc9-41af-85fd-ea1a540e1e23.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.271830] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Created folder: Instances in parent group-v558919. [ 685.272187] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.275357] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e0c052d-7b51-47b7-92bc-b0d51248c12f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.277444] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.277911] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779964, 'name': CreateSnapshot_Task, 'duration_secs': 0.85895} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.278177] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08cb7dcf-a482-4ba2-8509-6bde2a254fdd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.293709] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 685.299450] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c5c995-d741-47cf-9ad4-c283fa22f753 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.305096] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 685.305096] env[68244]: value = "task-2779969" [ 685.305096] env[68244]: _type = "Task" [ 685.305096] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.316241] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.316241] env[68244]: value = "task-2779970" [ 685.316241] env[68244]: _type = "Task" [ 685.316241] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.326915] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779969, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.332444] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779970, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.472841] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860105f8-5512-4414-83ee-fafcfe3443dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.480274] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba7c230-6b19-4cb9-b77e-a396589dfbcb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.514267] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Successfully created port: 91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.516722] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c93971f-ab33-47f8-a0e3-d6edd409bdc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.519588] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 685.527842] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186c285b-7e06-4187-8da8-55c6cd276fbc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.545025] env[68244]: DEBUG nova.compute.provider_tree [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.652951] env[68244]: DEBUG nova.network.neutron [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Updating instance_info_cache with network_info: [{"id": "a53c4339-1f00-4439-b65a-0583dcb486a9", "address": "fa:16:3e:0a:5d:7e", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa53c4339-1f", "ovs_interfaceid": "a53c4339-1f00-4439-b65a-0583dcb486a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.690859] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779966, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.725270] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Releasing lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.725270] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Received event network-changed-86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 685.732283] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Refreshing instance network info cache due to event network-changed-86448281-b3d4-4132-8a5e-1a366a1132e0. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 685.732655] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquiring lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.732782] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquired lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.732954] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Refreshing network info cache for port 86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.826306] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 685.830020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1bacec24-5c5c-413c-b15c-df9a7ef29fcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.835496] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779969, 'name': Rename_Task, 'duration_secs': 0.303588} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.838832] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 685.839560] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4689ef3d-e376-44ac-97f3-06de01aee0ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.843931] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 685.843931] env[68244]: value = "task-2779971" [ 685.843931] env[68244]: _type = "Task" [ 685.843931] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.852508] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779970, 'name': CreateVM_Task, 'duration_secs': 0.454597} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.852508] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Waiting for the task: (returnval){ [ 685.852508] env[68244]: value = "task-2779972" [ 685.852508] env[68244]: _type = "Task" [ 685.852508] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.852762] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 685.853493] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.853493] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.854103] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 685.860133] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87ce948d-3ef8-414f-ad8a-7415993dea88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.862608] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779971, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.867492] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779972, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.868959] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 685.868959] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5239a24a-7b65-aad0-7f52-324930ad2efc" [ 685.868959] env[68244]: _type = "Task" [ 685.868959] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.877450] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5239a24a-7b65-aad0-7f52-324930ad2efc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.048663] env[68244]: DEBUG nova.scheduler.client.report [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.155355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.155681] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Instance network_info: |[{"id": "a53c4339-1f00-4439-b65a-0583dcb486a9", "address": "fa:16:3e:0a:5d:7e", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa53c4339-1f", "ovs_interfaceid": "a53c4339-1f00-4439-b65a-0583dcb486a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 686.156150] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5d:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a53c4339-1f00-4439-b65a-0583dcb486a9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 686.166012] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] 
Creating folder: Project (821b99c053aa45b4b6b8fb09eb63aa73). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 686.166375] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a437430-0c26-4888-a3a5-e9facd85b7e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.184613] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created folder: Project (821b99c053aa45b4b6b8fb09eb63aa73) in parent group-v558876. [ 686.184863] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating folder: Instances. Parent ref: group-v558923. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 686.186946] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fbc60f0-ed15-4b99-ac80-e6084fcb49cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.197015] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779966, 'name': ReconfigVM_Task, 'duration_secs': 0.67216} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.197756] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 3776b39a-d10b-4068-8b4b-5dc25798e088/3776b39a-d10b-4068-8b4b-5dc25798e088.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.198427] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4abb8920-b4f1-41ed-9caf-115bc2fa0e34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.205068] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 686.205068] env[68244]: value = "task-2779975" [ 686.205068] env[68244]: _type = "Task" [ 686.205068] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.210434] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created folder: Instances in parent group-v558923. [ 686.210434] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 686.210434] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 686.210693] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51154280-5965-42fc-8e90-4b1cbf073bf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.231166] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779975, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.238635] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 686.238635] env[68244]: value = "task-2779976" [ 686.238635] env[68244]: _type = "Task" [ 686.238635] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.247632] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779976, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.356277] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779971, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.371023] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779972, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.379943] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5239a24a-7b65-aad0-7f52-324930ad2efc, 'name': SearchDatastore_Task, 'duration_secs': 0.022097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.380296] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.380534] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.380814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.380954] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.381144] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.381435] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b655df5-657b-4714-95fd-a5d0491eea48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.391385] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.391555] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.392916] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-388b2ceb-d7b0-44a4-9a8a-7185b0080687 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.401281] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 686.401281] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527695e2-c17f-424e-148b-0b8d82f25279" [ 686.401281] env[68244]: _type = "Task" [ 686.401281] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.411375] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527695e2-c17f-424e-148b-0b8d82f25279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.433334] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updated VIF entry in instance network info cache for port a07f522b-44ee-4a87-ac21-b5407bf48ff2. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.433742] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updating instance_info_cache with network_info: [{"id": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "address": "fa:16:3e:9b:b0:11", "network": {"id": "aa8ee9c8-b278-4d98-a636-22113f2660a3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1003260700-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2274129c89f94fdfbab47ca11d05db0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f969bd9-e040-4b9b-85b2-7c61231584ad", "external-id": "nsx-vlan-transportzone-995", "segmentation_id": 995, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa07f522b-44", "ovs_interfaceid": "a07f522b-44ee-4a87-ac21-b5407bf48ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.530132] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] 
[instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 686.557737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.565116] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.433s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.565116] env[68244]: DEBUG nova.objects.instance [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lazy-loading 'resources' on Instance uuid 3a4e045e-8e27-45e4-9c90-8aa16298a096 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 686.577657] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 686.577900] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 686.578559] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 686.579869] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 686.580762] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 
tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 686.581089] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 686.581723] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 686.582850] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 686.582850] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 686.582850] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 686.583702] env[68244]: DEBUG nova.virt.hardware [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 686.585501] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1f6aad-efb9-4a9a-bcd6-e1337e8c2988 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.596812] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472a01cd-9caa-43ba-8049-3bdb89d00b7c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.617596] env[68244]: INFO nova.scheduler.client.report [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Deleted allocations for instance f48156b9-0316-4a9c-9cf0-9dd9d7a932c1 [ 686.717834] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779975, 'name': Rename_Task, 'duration_secs': 0.190712} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.718158] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 686.718433] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3738fe50-6a45-48b8-9094-d094d2e72f5d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.731721] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 686.731721] env[68244]: value = "task-2779977" [ 686.731721] env[68244]: _type = "Task" [ 686.731721] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.747429] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.755470] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779976, 'name': CreateVM_Task, 'duration_secs': 0.458454} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.755668] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.757236] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.757587] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.757674] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.757958] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a36bbc60-e37e-4800-ad3e-8ae88f789145 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.763748] 
env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 686.763748] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524afc9a-ad8e-b9c7-6091-2a3779cd0eb3" [ 686.763748] env[68244]: _type = "Task" [ 686.763748] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.773429] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524afc9a-ad8e-b9c7-6091-2a3779cd0eb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.856536] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779971, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.870547] env[68244]: DEBUG oslo_vmware.api [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Task: {'id': task-2779972, 'name': PowerOnVM_Task, 'duration_secs': 0.630945} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.870855] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 686.871132] env[68244]: DEBUG nova.compute.manager [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 686.871907] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6260f7a9-5082-4134-b2bf-f8ffde49e952 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.914412] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527695e2-c17f-424e-148b-0b8d82f25279, 'name': SearchDatastore_Task, 'duration_secs': 0.010919} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.916266] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fc936f0-04b7-4f6b-ad84-17a939e59c08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.922483] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 686.922483] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8a10a-3438-0faa-a862-03e534fe1461" [ 686.922483] env[68244]: _type = "Task" [ 686.922483] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.931685] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8a10a-3438-0faa-a862-03e534fe1461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.936572] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Releasing lock "refresh_cache-8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.938179] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Received event network-vif-plugged-a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 686.938179] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Acquiring lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.938179] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.938179] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.938179] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] No waiting events found dispatching 
network-vif-plugged-a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 686.938516] env[68244]: WARNING nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Received unexpected event network-vif-plugged-a8cb98b0-596a-4263-96fc-669e34e6e364 for instance with vm_state building and task_state spawning. [ 686.938516] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Received event network-changed-a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 686.938516] env[68244]: DEBUG nova.compute.manager [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Refreshing instance network info cache due to event network-changed-a8cb98b0-596a-4263-96fc-669e34e6e364. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 686.938516] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Acquiring lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.938902] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Acquired lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.938902] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Refreshing network info cache for port a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.128206] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updated VIF entry in instance network info cache for port 86448281-b3d4-4132-8a5e-1a366a1132e0. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 687.128817] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updating instance_info_cache with network_info: [{"id": "86448281-b3d4-4132-8a5e-1a366a1132e0", "address": "fa:16:3e:06:ab:d0", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86448281-b3", "ovs_interfaceid": "86448281-b3d4-4132-8a5e-1a366a1132e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.139086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022d9a8-3df8-482f-8ebb-7ce85eb65313 tempest-TenantUsagesTestJSON-1653947201 tempest-TenantUsagesTestJSON-1653947201-project-member] Lock "f48156b9-0316-4a9c-9cf0-9dd9d7a932c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.162s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.246322] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779977, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.280440] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524afc9a-ad8e-b9c7-6091-2a3779cd0eb3, 'name': SearchDatastore_Task, 'duration_secs': 0.021067} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.280846] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.281224] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.281478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.357439] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779971, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.398972] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.437019] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8a10a-3438-0faa-a862-03e534fe1461, 'name': SearchDatastore_Task, 'duration_secs': 0.020091} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.440391] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.440700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f/511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.442726] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.442924] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.445434] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a50e0a67-3b3d-458a-ac47-e9d748871b34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.449984] env[68244]: DEBUG nova.compute.manager [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Received event network-vif-plugged-a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.450214] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Acquiring lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.454094] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.454094] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.454094] env[68244]: DEBUG nova.compute.manager [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] No waiting events found dispatching network-vif-plugged-a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.454094] env[68244]: WARNING nova.compute.manager [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Received unexpected event network-vif-plugged-a53c4339-1f00-4439-b65a-0583dcb486a9 for instance with vm_state building and task_state spawning. [ 687.454094] env[68244]: DEBUG nova.compute.manager [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Received event network-changed-a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.454486] env[68244]: DEBUG nova.compute.manager [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Refreshing instance network info cache due to event network-changed-a53c4339-1f00-4439-b65a-0583dcb486a9. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 687.454486] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Acquiring lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.454486] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Acquired lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.454486] env[68244]: DEBUG nova.network.neutron [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Refreshing network info cache for port a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.454486] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8855350-fa99-4c8d-8f18-4a229b48c2f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.471188] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 687.471188] env[68244]: value = "task-2779978" [ 687.471188] env[68244]: _type = "Task" [ 687.471188] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.472731] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.472900] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 687.479362] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dc525d2-c5fd-43df-8366-52c50f22cd9a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.490863] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 687.490863] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236f1d4-f3be-9d13-dcb3-91b3eea3288a" [ 687.490863] env[68244]: _type = "Task" [ 687.490863] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.494703] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.509899] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236f1d4-f3be-9d13-dcb3-91b3eea3288a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.552304] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15576b94-e053-4847-acf8-8424be82ef0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.559438] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e70e3c6-e676-4ec8-a11f-923b0445f34f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.603393] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05e53ac-27de-4921-b9b2-279c5075cb3d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.608949] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce884c2d-af09-47f3-ad30-ddc7f070bf12 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.623294] env[68244]: DEBUG nova.compute.provider_tree [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.636999] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Releasing lock "refresh_cache-f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.636999] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Received event network-changed-e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.637647] env[68244]: DEBUG nova.compute.manager [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Refreshing instance network info cache due to event network-changed-e39c0e00-ee59-4d80-b276-18ca3d5cb12f. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 687.637647] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquiring lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.637794] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Acquired lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.637967] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Refreshing network info cache for port e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.711552] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Successfully updated port: 91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.744339] env[68244]: DEBUG oslo_vmware.api [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2779977, 'name': PowerOnVM_Task, 'duration_secs': 0.779778} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.744646] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.744846] env[68244]: INFO nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Took 8.74 seconds to spawn the instance on the hypervisor. 
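The records in this stretch repeatedly trace the same oslo.vmware task pattern: an asynchronous vSphere method is invoked (CreateVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task), a Task managed-object reference comes back in the "Waiting for the task: (returnval){ ... }" record, and wait_for_task() then polls it, producing the "_poll_task ... progress is N%" lines and the final "completed successfully". The snippet below is a minimal sketch of that pattern using the public oslo.vmware API, not Nova's actual vmwareapi driver code; the vCenter host, the credentials, and the 'vm-12345' moref value are placeholders.

# Minimal sketch (assumptions noted in comments) of the invoke-then-poll
# pattern traced in the surrounding records, via the public oslo.vmware API.
from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder vCenter endpoint and credentials.
session = api.VMwareAPISession(
    'vc.example.test', 'user', 'password',
    api_retry_count=10,
    task_poll_interval=0.5)   # cadence behind the "_poll_task ... progress" lines

# Managed object reference of an existing VM (placeholder value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# The *_Task call returns immediately with a Task moref, which is what the
# 'Waiting for the task: (returnval){ ... _type = "Task" }' records show.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Block until the task reports success (raises on error or cancellation);
# each poll corresponds to one 'PowerOnVM_Task progress is N%' record, and
# the last one to 'completed successfully'.
session.wait_for_task(task)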
[ 687.745037] env[68244]: DEBUG nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.745856] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b241a2c4-2be2-4772-a6a2-8f952dd2c4cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.860951] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779971, 'name': CloneVM_Task, 'duration_secs': 1.592678} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.861279] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Created linked-clone VM from snapshot [ 687.862227] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995042fe-c9fa-4999-a832-62f764bc3446 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.875546] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Uploading image e0eaa28c-0b70-4930-bb1f-e0b1bf79ebda {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 687.899228] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 687.900309] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a64d9dce-19d2-4c55-957b-52766f99b23f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.908925] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 687.908925] env[68244]: value = "task-2779979" [ 687.908925] env[68244]: _type = "Task" [ 687.908925] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.918127] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779979, 'name': Destroy_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.982951] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779978, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.009025] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236f1d4-f3be-9d13-dcb3-91b3eea3288a, 'name': SearchDatastore_Task, 'duration_secs': 0.028588} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.009025] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69be783b-8179-4cf6-8b13-727a8b62336f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.017997] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 688.017997] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523358a9-492b-d0e9-c421-237f74273ca5" [ 688.017997] env[68244]: _type = "Task" [ 688.017997] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.026966] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523358a9-492b-d0e9-c421-237f74273ca5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.126516] env[68244]: DEBUG nova.scheduler.client.report [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.215570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.215724] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.215875] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.263729] env[68244]: INFO nova.compute.manager [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Took 27.27 seconds to build instance. [ 688.297332] env[68244]: DEBUG nova.network.neutron [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Updated VIF entry in instance network info cache for port a53c4339-1f00-4439-b65a-0583dcb486a9. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.298626] env[68244]: DEBUG nova.network.neutron [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Updating instance_info_cache with network_info: [{"id": "a53c4339-1f00-4439-b65a-0583dcb486a9", "address": "fa:16:3e:0a:5d:7e", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa53c4339-1f", "ovs_interfaceid": "a53c4339-1f00-4439-b65a-0583dcb486a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.422517] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779979, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.485055] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.749074} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.485826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f/511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.485826] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.485826] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f185631f-a5cb-4d8b-a71f-38f836f13e47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.492751] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 688.492751] env[68244]: value = "task-2779980" [ 688.492751] env[68244]: _type = "Task" [ 688.492751] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.501491] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779980, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.531918] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523358a9-492b-d0e9-c421-237f74273ca5, 'name': SearchDatastore_Task, 'duration_secs': 0.052117} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.532233] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.532617] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 03af8758-fba3-4173-b998-d9e6b3113f8c/03af8758-fba3-4173-b998-d9e6b3113f8c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 688.532973] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e2804a8-4df1-45b6-88c8-e698e625a57c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.543829] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 688.543829] env[68244]: value = "task-2779981" [ 688.543829] env[68244]: _type = "Task" [ 688.543829] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.554090] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779981, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.634507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.639022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.150s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.641444] env[68244]: INFO nova.compute.claims [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.672180] env[68244]: INFO nova.scheduler.client.report [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Deleted allocations for instance 3a4e045e-8e27-45e4-9c90-8aa16298a096 [ 688.754494] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.766480] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d7a5c14-614a-426b-98e5-cffdc34dd278 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.351s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.800997] env[68244]: DEBUG oslo_concurrency.lockutils [req-858b9c7e-589b-40bc-bc23-3cf80542d06a req-ed75eaa4-b30f-4211-979b-917b3b9f7382 service nova] Releasing lock "refresh_cache-03af8758-fba3-4173-b998-d9e6b3113f8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.893090] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Updated VIF entry in instance network info cache for port a8cb98b0-596a-4263-96fc-669e34e6e364. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.893466] env[68244]: DEBUG nova.network.neutron [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Updating instance_info_cache with network_info: [{"id": "a8cb98b0-596a-4263-96fc-669e34e6e364", "address": "fa:16:3e:3e:29:3c", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8cb98b0-59", "ovs_interfaceid": "a8cb98b0-596a-4263-96fc-669e34e6e364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.920991] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779979, 'name': Destroy_Task, 'duration_secs': 0.757207} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.921301] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Destroyed the VM [ 688.921655] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 688.921927] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e8c1f2f9-9e8b-4156-8dc2-988127511392 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.928397] env[68244]: DEBUG nova.network.neutron [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Updating instance_info_cache with network_info: [{"id": "91539a24-6b83-487e-9863-9e0ff0231dd9", "address": "fa:16:3e:58:b3:e6", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91539a24-6b", "ovs_interfaceid": "91539a24-6b83-487e-9863-9e0ff0231dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.931733] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 688.931733] env[68244]: value = "task-2779982" [ 688.931733] env[68244]: _type = "Task" [ 688.931733] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.945525] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779982, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.003640] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779980, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100686} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.004043] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.005382] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fabec4-ad86-49bd-9a27-a9fb5e1fe43a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.037151] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f/511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.037503] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-971c757e-0321-43c5-9bcb-6247ccb6c923 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.069634] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489804} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.071321] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 03af8758-fba3-4173-b998-d9e6b3113f8c/03af8758-fba3-4173-b998-d9e6b3113f8c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 689.071458] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 689.071762] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 689.071762] env[68244]: value = "task-2779983" [ 689.071762] env[68244]: _type = "Task" [ 689.071762] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.075021] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1fbcdf14-3523-4e2f-9fa8-6fa1d65a60c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.086617] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.092813] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 689.092813] env[68244]: value = "task-2779984" [ 689.092813] env[68244]: _type = "Task" [ 689.092813] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.103162] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779984, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.183643] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f316f616-ee5f-4ceb-b606-6d086a4c12ac tempest-DeleteServersAdminTestJSON-1420104198 tempest-DeleteServersAdminTestJSON-1420104198-project-admin] Lock "3a4e045e-8e27-45e4-9c90-8aa16298a096" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.850s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.271221] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.396318] env[68244]: DEBUG oslo_concurrency.lockutils [req-af5a9527-0bd6-4eb2-9a65-de3a1ebc556b req-ac7104a1-fa61-4d76-9995-8492c9a068a0 service nova] Releasing lock "refresh_cache-511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.436209] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.436613] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance network_info: |[{"id": "91539a24-6b83-487e-9863-9e0ff0231dd9", "address": "fa:16:3e:58:b3:e6", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91539a24-6b", "ovs_interfaceid": "91539a24-6b83-487e-9863-9e0ff0231dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.437162] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:b3:e6', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91539a24-6b83-487e-9863-9e0ff0231dd9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.446917] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating folder: Project (aaf55a7bfa5948d1837855650c1c960b). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.451330] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-215dbc0f-4274-4345-808f-e30351644e1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.460168] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779982, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.462109] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created folder: Project (aaf55a7bfa5948d1837855650c1c960b) in parent group-v558876. [ 689.462328] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating folder: Instances. Parent ref: group-v558926. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.462959] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6dcf781a-c47e-432a-b411-2ebeeb145211 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.472691] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created folder: Instances in parent group-v558926. [ 689.472959] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.473184] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.473409] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f8f3361-d0f4-45df-ac94-33458f75cf0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.501297] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.501297] env[68244]: value = "task-2779987" [ 689.501297] env[68244]: _type = "Task" [ 689.501297] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.517628] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779987, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.552727] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updated VIF entry in instance network info cache for port e39c0e00-ee59-4d80-b276-18ca3d5cb12f. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.552727] env[68244]: DEBUG nova.network.neutron [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updating instance_info_cache with network_info: [{"id": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "address": "fa:16:3e:ef:10:b5", "network": {"id": "17c063d9-ba44-409b-b637-59552bc5d906", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2127301711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6903b79c37d418aa5a767d9cb537ef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0685bd0b-3dbf-4a06-951c-c6a4726dd4b0", "external-id": "nsx-vlan-transportzone-661", "segmentation_id": 661, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape39c0e00-ee", "ovs_interfaceid": "e39c0e00-ee59-4d80-b276-18ca3d5cb12f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.587224] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779983, 'name': ReconfigVM_Task, 'duration_secs': 0.318532} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.587602] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f/511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.588878] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d290d62e-545c-42ab-b0a2-552d8607381d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.599973] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 689.599973] env[68244]: value = "task-2779988" [ 689.599973] env[68244]: _type = "Task" [ 689.599973] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.607615] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109446} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.608886] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.609970] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a9d54e-fe0b-40ad-bcda-cc0959678b36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.616336] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779988, 'name': Rename_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.638638] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 03af8758-fba3-4173-b998-d9e6b3113f8c/03af8758-fba3-4173-b998-d9e6b3113f8c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.638638] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77daa727-3231-48c1-9d1a-805c8202af92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.659821] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 689.659821] env[68244]: value = "task-2779989" [ 689.659821] env[68244]: _type = "Task" [ 689.659821] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.798263] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.949344] env[68244]: DEBUG oslo_vmware.api [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779982, 'name': RemoveSnapshot_Task, 'duration_secs': 0.595798} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.949619] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 690.013930] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779987, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.048525] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121ab74c-f2da-4162-91be-fb9962999a9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.056995] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ec1306-d2fa-4951-9fee-4763179d3b7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.060645] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdb55546-a7fb-44da-b601-7bb722787e7d req-0958cd52-bf92-407c-9395-9b6ef192570c service nova] Releasing lock "refresh_cache-d81bdefa-9c23-413b-9670-bbb2139084f7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.088166] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae00e7b-40c1-4133-a78a-bf9839d05d23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.095869] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b25665c-6218-48ae-bb61-9b85588d4709 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.114224] env[68244]: DEBUG nova.compute.provider_tree [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 690.118765] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779988, 'name': Rename_Task, 'duration_secs': 0.25518} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.119321] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.119649] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b0eff41-452b-44c0-9f3f-c22fc2886228 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.126378] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 690.126378] env[68244]: value = "task-2779990" [ 690.126378] env[68244]: _type = "Task" [ 690.126378] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.135080] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.170064] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779989, 'name': ReconfigVM_Task, 'duration_secs': 0.494619} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.170857] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 03af8758-fba3-4173-b998-d9e6b3113f8c/03af8758-fba3-4173-b998-d9e6b3113f8c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.171201] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecd18f5c-d99b-4859-9353-1fff4fb0f6ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.177304] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 690.177304] env[68244]: value = "task-2779991" [ 690.177304] env[68244]: _type = "Task" [ 690.177304] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.185804] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779991, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.456064] env[68244]: WARNING nova.compute.manager [None req-7a9339bd-0497-4094-a547-18b663320f07 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Image not found during snapshot: nova.exception.ImageNotFound: Image e0eaa28c-0b70-4930-bb1f-e0b1bf79ebda could not be found. [ 690.514240] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779987, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.637535] env[68244]: DEBUG oslo_vmware.api [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2779990, 'name': PowerOnVM_Task, 'duration_secs': 0.501982} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.637862] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 690.638094] env[68244]: INFO nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Took 9.10 seconds to spawn the instance on the hypervisor. [ 690.638855] env[68244]: DEBUG nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 690.639258] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45acbaee-7dfb-430b-9077-123b7431bd63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.643564] env[68244]: ERROR nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [req-07b37bec-f328-4df7-b32f-986e2d3b4e5f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-07b37bec-f328-4df7-b32f-986e2d3b4e5f"}]} [ 690.685488] env[68244]: DEBUG nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 690.696818] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779991, 'name': Rename_Task, 'duration_secs': 0.186158} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.697277] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.697711] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e93b3d8-0d6e-4d96-a430-e60edd1bee89 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.705757] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 690.705757] env[68244]: value = "task-2779992" [ 690.705757] env[68244]: _type = "Task" [ 690.705757] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.712279] env[68244]: DEBUG nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 690.712794] env[68244]: DEBUG nova.compute.provider_tree [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 690.725683] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779992, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.736172] env[68244]: DEBUG nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 690.757944] env[68244]: DEBUG nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 690.764020] env[68244]: DEBUG nova.compute.manager [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Received event network-vif-plugged-91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 690.764020] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Acquiring lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.764020] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.764020] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.764020] env[68244]: DEBUG nova.compute.manager [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] No waiting events found dispatching network-vif-plugged-91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 690.764332] env[68244]: WARNING nova.compute.manager [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Received unexpected event network-vif-plugged-91539a24-6b83-487e-9863-9e0ff0231dd9 for instance with vm_state building and task_state spawning. 
[ 690.764463] env[68244]: DEBUG nova.compute.manager [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Received event network-changed-91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 690.764766] env[68244]: DEBUG nova.compute.manager [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Refreshing instance network info cache due to event network-changed-91539a24-6b83-487e-9863-9e0ff0231dd9. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 690.765083] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Acquiring lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.765329] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Acquired lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.767084] env[68244]: DEBUG nova.network.neutron [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Refreshing network info cache for port 91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.811108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "c662b964-abc9-41af-85fd-ea1a540e1e23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.811108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.811108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "c662b964-abc9-41af-85fd-ea1a540e1e23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.811108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.811298] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.813723] env[68244]: INFO nova.compute.manager [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Terminating instance [ 691.016188] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779987, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.170198] env[68244]: INFO nova.compute.manager [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Took 28.95 seconds to build instance. [ 691.175159] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a6a459-eded-4021-8d1e-84e66b016a73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.185334] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab15886-057b-4e19-a6db-a148aeeec232 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.230537] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5f03ab-03ec-46ab-bfe5-5ab231e82195 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.243647] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55c99aa-1f81-45ab-a172-b0bd4195b24a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.248030] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779992, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.260940] env[68244]: DEBUG nova.compute.provider_tree [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.326436] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "refresh_cache-c662b964-abc9-41af-85fd-ea1a540e1e23" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.326625] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquired lock "refresh_cache-c662b964-abc9-41af-85fd-ea1a540e1e23" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.326791] env[68244]: DEBUG nova.network.neutron [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.515365] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2779987, 'name': CreateVM_Task, 'duration_secs': 1.588197} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.515562] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.516290] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.516463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.516967] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 691.517269] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee2edbc-1b0c-4de8-8d72-b53d222a6065 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.522061] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 691.522061] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283eb84-30db-87ac-9bfa-999434b3a422" [ 691.522061] env[68244]: _type = "Task" [ 691.522061] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.530272] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283eb84-30db-87ac-9bfa-999434b3a422, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.672662] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b1dd2c3-7c91-44b4-8457-5ccfee58b157 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.471s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.738322] env[68244]: DEBUG oslo_vmware.api [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2779992, 'name': PowerOnVM_Task, 'duration_secs': 1.022219} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.738322] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.738322] env[68244]: INFO nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Took 7.70 seconds to spawn the instance on the hypervisor. [ 691.738322] env[68244]: DEBUG nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.739206] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d631162-da56-464e-a162-fe4d44e6eab6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.812799] env[68244]: DEBUG nova.scheduler.client.report [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 30 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 691.812799] env[68244]: DEBUG nova.compute.provider_tree [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 30 to 31 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 691.813263] env[68244]: DEBUG nova.compute.provider_tree [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 
tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.870337] env[68244]: DEBUG nova.network.neutron [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.999222] env[68244]: DEBUG nova.network.neutron [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.034294] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283eb84-30db-87ac-9bfa-999434b3a422, 'name': SearchDatastore_Task, 'duration_secs': 0.027619} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.034615] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.034845] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.035088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.035239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.035958] env[68244]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.037303] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d82609b-6bb7-4033-9cdc-566289ac67e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.051681] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.051681] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.051681] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a958d7-3c4c-4de9-835e-0d8d755007a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.056710] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 692.056710] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520989f6-1bc9-09dc-805a-50bb2bdc1c4d" [ 692.056710] env[68244]: _type = "Task" [ 692.056710] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.071226] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520989f6-1bc9-09dc-805a-50bb2bdc1c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009064} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.072264] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61965fa2-c5d6-471b-9446-b753f6df2eb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.077926] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 692.077926] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52740237-dd31-5c3d-7bc9-3c6d09885a10" [ 692.077926] env[68244]: _type = "Task" [ 692.077926] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.086852] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52740237-dd31-5c3d-7bc9-3c6d09885a10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.262129] env[68244]: INFO nova.compute.manager [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Took 28.93 seconds to build instance. [ 692.322195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.683s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.322195] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.329554] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.873s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.331058] env[68244]: INFO nova.compute.claims [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.366697] env[68244]: DEBUG nova.network.neutron [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Updated VIF entry in instance network info cache for port 91539a24-6b83-487e-9863-9e0ff0231dd9. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.367057] env[68244]: DEBUG nova.network.neutron [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Updating instance_info_cache with network_info: [{"id": "91539a24-6b83-487e-9863-9e0ff0231dd9", "address": "fa:16:3e:58:b3:e6", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91539a24-6b", "ovs_interfaceid": "91539a24-6b83-487e-9863-9e0ff0231dd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.507817] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Releasing lock "refresh_cache-c662b964-abc9-41af-85fd-ea1a540e1e23" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.508406] env[68244]: DEBUG nova.compute.manager [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 692.508406] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.509436] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9874bdd9-d3be-4826-91a9-e1eb9a88afe4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.519544] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.519813] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2d5244e-1438-4a45-ac33-e313973f8506 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.529175] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 692.529175] env[68244]: value = "task-2779993" [ 692.529175] env[68244]: _type = "Task" [ 692.529175] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.542345] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.593073] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52740237-dd31-5c3d-7bc9-3c6d09885a10, 'name': SearchDatastore_Task, 'duration_secs': 0.011061} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.593298] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.593555] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 692.593821] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89e44798-b476-42f8-9a05-9438ef0e7f27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.600338] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 692.600338] env[68244]: value = "task-2779994" [ 692.600338] env[68244]: _type = "Task" [ 692.600338] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.611174] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779994, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.763628] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8d14b81-cf90-4c13-9821-3ac525fbbddd tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.443s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.815018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.815018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.836370] env[68244]: DEBUG nova.compute.utils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 692.838862] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.839036] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.873351] env[68244]: DEBUG oslo_concurrency.lockutils [req-53f8ae45-7804-4b4a-ad98-810824b6f31b req-4b8bdf46-64f0-4eb6-a062-c8312db33546 service nova] Releasing lock "refresh_cache-d73f87d2-41b3-4396-b5b5-932f8c6bf626" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.013997] env[68244]: DEBUG nova.policy [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8587147149b84a34bfbbd01e2bb637b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bf57141eff643a8b03f3b0576678ec1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.043453] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779993, 'name': PowerOffVM_Task, 'duration_secs': 0.162601} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.043732] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 693.043900] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 693.044209] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4f51cf4-9498-41ef-8b59-88dd62607c87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.071398] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 693.071787] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 693.072032] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Deleting the datastore file [datastore2] c662b964-abc9-41af-85fd-ea1a540e1e23 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.072265] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f613a15-fed0-4eb2-a4e6-9c81f955f89a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.081869] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for the task: (returnval){ [ 693.081869] env[68244]: value = "task-2779996" [ 693.081869] env[68244]: _type = "Task" [ 693.081869] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.089066] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.122606] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503153} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.122683] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 693.122948] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 693.123239] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-233b2003-bed3-4079-8ba8-cfd93bddeb73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.132205] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 693.132205] env[68244]: value = "task-2779997" [ 693.132205] env[68244]: _type = "Task" [ 693.132205] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.140838] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779997, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.318035] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.345825] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.592716] env[68244]: DEBUG oslo_vmware.api [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Task: {'id': task-2779996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107853} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.596524] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 693.596720] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 693.596890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.597080] env[68244]: INFO nova.compute.manager [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Took 1.09 seconds to destroy the instance on the hypervisor. [ 693.597656] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.600389] env[68244]: DEBUG nova.compute.manager [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 693.600389] env[68244]: DEBUG nova.network.neutron [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.617031] env[68244]: DEBUG nova.network.neutron [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.643601] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779997, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075552} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.644034] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 693.645353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133ac2cf-1ccb-496d-b14e-56907de8cc0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.675196] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.678712] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa508073-45b4-4a14-a1d2-84714e02438e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.700487] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 693.700487] env[68244]: value = "task-2779998" [ 693.700487] env[68244]: _type = "Task" [ 693.700487] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.712606] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779998, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.759822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f44b675-a2ac-4a77-aaa7-f14db87f05bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.763442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "57504eac-0d7f-4fbe-b08c-6864713cca94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.764432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.764432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.764432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.764432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.766521] env[68244]: INFO nova.compute.manager [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Terminating instance [ 693.771757] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a017e5e3-be75-4c0b-aeff-db1f7b16793e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.810941] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22cf0c5-5a94-4b96-bd9d-80a9554a6272 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.821128] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450848f8-790c-4ee8-82e3-2b2a9f2ffabb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.844020] env[68244]: DEBUG nova.compute.provider_tree [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.848218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.119593] env[68244]: DEBUG nova.network.neutron [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.213824] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779998, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.279836] env[68244]: DEBUG nova.compute.manager [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.280226] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.281949] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f658c3-c2cd-4596-8207-2a78f2ee3382 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.294245] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.294517] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f857206-b341-4be1-ad7d-0d34dec8f880 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.305020] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 694.305020] env[68244]: value = "task-2779999" [ 694.305020] env[68244]: _type = "Task" [ 694.305020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.317469] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.354926] env[68244]: DEBUG nova.scheduler.client.report [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.363206] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.393777] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.393777] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.393777] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.393777] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.394193] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.394193] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.394271] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.394450] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.394584] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 
tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.394928] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.394928] env[68244]: DEBUG nova.virt.hardware [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.397308] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffce6ab-3198-4b90-85e1-5ff8cee1def2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.400924] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Successfully created port: 4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.411990] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9921be58-8008-49d1-b0ca-c44288a072fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.623061] env[68244]: INFO nova.compute.manager [-] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Took 1.02 seconds to deallocate network for instance. [ 694.715544] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2779998, 'name': ReconfigVM_Task, 'duration_secs': 0.944603} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.716163] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Reconfigured VM instance instance-00000010 to attach disk [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.716843] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-004dc4d6-edcd-4ffd-8d5b-053c8117e58f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.725744] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 694.725744] env[68244]: value = "task-2780000" [ 694.725744] env[68244]: _type = "Task" [ 694.725744] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.741403] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780000, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.819369] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2779999, 'name': PowerOffVM_Task, 'duration_secs': 0.192377} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.821494] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 694.824197] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 694.824610] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-119dad12-148b-4efd-b9a9-46ea040a9320 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.862794] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.865803] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.866383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.715s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.866539] env[68244]: DEBUG nova.objects.instance [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 694.890906] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 694.891156] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 694.891336] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleting the datastore file [datastore2] 57504eac-0d7f-4fbe-b08c-6864713cca94 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.891633] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c71a3f3c-ac92-4f73-bd7c-eb6fcc7a9750 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.899524] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 694.899524] env[68244]: value = "task-2780002" [ 694.899524] env[68244]: _type = "Task" [ 694.899524] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.916981] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.132324] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.239125] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780000, 'name': Rename_Task, 'duration_secs': 0.170989} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.239625] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.241086] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f23f9d8-a05b-41a3-af64-f00b538ed1e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.254312] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 695.254312] env[68244]: value = "task-2780003" [ 695.254312] env[68244]: _type = "Task" [ 695.254312] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.266090] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.279347] env[68244]: DEBUG nova.compute.manager [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Received event network-changed-57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 695.279532] env[68244]: DEBUG nova.compute.manager [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Refreshing instance network info cache due to event network-changed-57f7dda3-98ee-46c7-871d-37b0add34372. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 695.279746] env[68244]: DEBUG oslo_concurrency.lockutils [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] Acquiring lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.279881] env[68244]: DEBUG oslo_concurrency.lockutils [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] Acquired lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.280798] env[68244]: DEBUG nova.network.neutron [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Refreshing network info cache for port 57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.375658] env[68244]: DEBUG nova.compute.utils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.377191] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.377362] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.408917] env[68244]: DEBUG oslo_vmware.api [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143268} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.409265] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.409585] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.409912] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.410153] env[68244]: INFO nova.compute.manager [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Took 1.13 seconds to destroy the instance on the hypervisor. [ 695.410806] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 695.410982] env[68244]: DEBUG nova.compute.manager [-] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.411169] env[68244]: DEBUG nova.network.neutron [-] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.468037] env[68244]: DEBUG nova.policy [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d5f52c48753455fa228eecaf2e5eb1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba31a861555b430cab07d044beaf3482', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.608474] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "2aacd21f-d664-4267-8331-d3862f43d35b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.608711] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.623010] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "bbc08614-926e-4209-abec-4808f223943a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.623241] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.770167] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780003, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.881449] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.889207] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40962640-3192-4ee5-bdc5-a3babd4242ae tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.889207] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.588s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.898531] env[68244]: INFO nova.compute.claims [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.099130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "03af8758-fba3-4173-b998-d9e6b3113f8c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.099396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.099570] env[68244]: DEBUG nova.compute.manager [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.100513] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a375b14b-eafe-401e-b143-12cb3ba853cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.112718] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.119404] env[68244]: DEBUG nova.compute.manager [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 696.120188] env[68244]: DEBUG nova.objects.instance [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lazy-loading 'flavor' on Instance uuid 03af8758-fba3-4173-b998-d9e6b3113f8c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.272844] env[68244]: DEBUG oslo_vmware.api [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780003, 'name': PowerOnVM_Task, 'duration_secs': 0.692756} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.272982] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.273133] env[68244]: INFO nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Took 9.74 seconds to spawn the instance on the hypervisor. [ 696.273372] env[68244]: DEBUG nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.274256] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9818d559-a8e6-4819-811b-1303b5414934 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.435624] env[68244]: DEBUG nova.network.neutron [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updated VIF entry in instance network info cache for port 57f7dda3-98ee-46c7-871d-37b0add34372. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.436340] env[68244]: DEBUG nova.network.neutron [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updating instance_info_cache with network_info: [{"id": "57f7dda3-98ee-46c7-871d-37b0add34372", "address": "fa:16:3e:79:0a:05", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57f7dda3-98", "ovs_interfaceid": "57f7dda3-98ee-46c7-871d-37b0add34372", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.440041] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Successfully created port: 454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.511928] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.512474] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.646560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.689682] env[68244]: DEBUG nova.network.neutron [-] [instance: 
57504eac-0d7f-4fbe-b08c-6864713cca94] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.793820] env[68244]: INFO nova.compute.manager [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Took 29.53 seconds to build instance. [ 696.913363] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.943688] env[68244]: DEBUG oslo_concurrency.lockutils [req-da4740ba-c25e-4291-a110-4d1a826c1e6e req-1bc4d292-a141-49e1-9f40-def387291770 service nova] Releasing lock "refresh_cache-3776b39a-d10b-4068-8b4b-5dc25798e088" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.955347] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.955660] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.955846] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.956073] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.956312] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.956463] env[68244]: DEBUG nova.virt.hardware [None 
req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.956687] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.956877] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.957099] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.957429] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.957564] env[68244]: DEBUG nova.virt.hardware [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.958830] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aab960-514e-45fe-9bd7-fbbc9fa61e2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.969208] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895f4d71-4bd9-4d41-a0b9-8a964c3e488f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.130243] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 697.130512] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-459e4719-4fe5-4496-9b56-e792b28330f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.141444] env[68244]: DEBUG oslo_vmware.api [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 697.141444] env[68244]: value = "task-2780004" [ 697.141444] env[68244]: _type = "Task" [ 
697.141444] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.155497] env[68244]: DEBUG oslo_vmware.api [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780004, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.197390] env[68244]: INFO nova.compute.manager [-] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Took 1.79 seconds to deallocate network for instance. [ 697.297359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-749542d0-b8b1-451f-9f5a-9b94afac3b02 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.048s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.358074] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3965e918-c78c-4b99-8c8b-9b899d8db677 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.370841] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8663c43f-e36a-4b11-ae07-c1035d3bb939 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.413717] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad425b6-335c-4e52-b16a-1f24028c684b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.422297] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ddd4cf-b27b-4d6f-9616-4bb1076b6d58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.435947] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.501953] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Successfully updated port: 4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.652023] env[68244]: DEBUG oslo_vmware.api [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': 
task-2780004, 'name': PowerOffVM_Task, 'duration_secs': 0.297565} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.652335] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.652425] env[68244]: DEBUG nova.compute.manager [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.653635] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eec08b0-2fc6-4384-bc62-8b737f8647e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.711145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.799862] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 697.978352] env[68244]: ERROR nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [req-9fd1ebe7-24e5-471f-944c-f042adedd3f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9fd1ebe7-24e5-471f-944c-f042adedd3f0"}]} [ 698.006906] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 698.009539] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.010027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.010027] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.021767] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.022011] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.035314] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 698.035314] env[68244]: DEBUG 
nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.052469] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 698.079059] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 698.168194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-92dbea29-529f-4b9f-8545-a874d92bcf9b tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.069s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.324613] env[68244]: DEBUG nova.compute.manager [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Received event network-vif-deleted-0d74c09f-0ee9-498b-a744-56d26babef9c {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.324815] env[68244]: DEBUG nova.compute.manager [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Received event network-vif-plugged-4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.325042] env[68244]: DEBUG oslo_concurrency.lockutils [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.327040] env[68244]: DEBUG oslo_concurrency.lockutils [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.330105] env[68244]: DEBUG oslo_concurrency.lockutils [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.330105] env[68244]: DEBUG nova.compute.manager [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] No waiting events found dispatching network-vif-plugged-4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 698.330105] env[68244]: WARNING nova.compute.manager [req-e138f027-9d4b-4932-8472-1882550af4cd req-f76b4b37-3526-493d-9e49-1aa35378b5a0 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Received unexpected event network-vif-plugged-4bc0d0f1-ef11-425c-987c-514c9b55015f for instance with vm_state building and task_state spawning. [ 698.340858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.492569] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Successfully updated port: 454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 698.559291] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b035ea-d4b3-4185-adb7-5d61db84020e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.571443] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.579025] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374bfac3-faa5-48ed-82e4-d725c7217e73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.616143] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80d9cbd-350a-40a0-8f77-e24d1dcd9f7c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.619610] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.619907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.626288] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d36c4c-90e3-47fe-ac8b-f7a9727ea5df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.645132] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.913344] env[68244]: DEBUG nova.compute.manager [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Received event network-vif-plugged-454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.913594] env[68244]: DEBUG oslo_concurrency.lockutils [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] Acquiring lock "59b0dd89-0093-4e50-9428-8db5c7fd429d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.913831] env[68244]: DEBUG oslo_concurrency.lockutils [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] Lock 
"59b0dd89-0093-4e50-9428-8db5c7fd429d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.914050] env[68244]: DEBUG oslo_concurrency.lockutils [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.914238] env[68244]: DEBUG nova.compute.manager [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] No waiting events found dispatching network-vif-plugged-454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 698.914378] env[68244]: WARNING nova.compute.manager [req-dee4bd3d-3ecf-4228-897a-eb3a9d19d112 req-06f25cfe-d031-45e7-8455-7a2853a7a073 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Received unexpected event network-vif-plugged-454e7dd1-22ef-4014-9597-5df4c82d0759 for instance with vm_state building and task_state spawning. [ 698.998836] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.999099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquired lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.999264] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.090596] env[68244]: DEBUG nova.network.neutron [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.174588] env[68244]: ERROR nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [req-b520e053-30fc-4e7c-b11d-bf77c0d322de] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b520e053-30fc-4e7c-b11d-bf77c0d322de"}]} [ 699.195528] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 699.213120] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 699.213252] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.234801] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc 
tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 699.261067] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 699.556447] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.594267] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.595342] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Instance network_info: |[{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 699.595678] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:0b:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bc0d0f1-ef11-425c-987c-514c9b55015f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.603633] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Creating folder: Project (5bf57141eff643a8b03f3b0576678ec1). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.603633] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c49f6c1-bf4f-4ea7-9fae-f877714c13c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.613985] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Created folder: Project (5bf57141eff643a8b03f3b0576678ec1) in parent group-v558876. [ 699.614211] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Creating folder: Instances. Parent ref: group-v558929. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.614461] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e428d3de-8329-43e4-83e4-46c5853c1742 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.629015] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Created folder: Instances in parent group-v558929. [ 699.629015] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.629015] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 699.629015] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c97bc09-3aad-43c0-9957-f2743dc77dac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.662465] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 699.662465] env[68244]: value = "task-2780007" [ 699.662465] env[68244]: _type = "Task" [ 699.662465] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.682548] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780007, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.764302] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b0b715-c9e5-4aec-bdb5-947c00e7372f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.773962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dd58f0-6960-411c-9abf-98fc1f7af9e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.821115] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb1924e-5db8-4702-a757-72596954a129 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.833657] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5e69ae-5b2a-44c1-bdae-bedc0824ffb0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.851655] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 700.003829] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "7778c027-d4af-436c-a545-aa513c0b1127" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.004087] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.007977] env[68244]: DEBUG nova.network.neutron [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Updating instance_info_cache with network_info: [{"id": "454e7dd1-22ef-4014-9597-5df4c82d0759", "address": "fa:16:3e:a1:86:1b", "network": {"id": "f5ed040d-c605-4ed1-b2de-d4228912be0d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1108201790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba31a861555b430cab07d044beaf3482", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e7dd1-22", "ovs_interfaceid": "454e7dd1-22ef-4014-9597-5df4c82d0759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.155646] env[68244]: INFO nova.compute.manager [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Rebuilding instance [ 700.173279] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780007, 'name': CreateVM_Task, 'duration_secs': 0.504204} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.173486] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 700.178116] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.178306] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.178670] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 700.178969] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aba0309a-3c0a-4954-805f-91c0119ba236 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.190821] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 700.190821] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529db365-c459-6c61-7ebd-a719d0010de2" [ 700.190821] env[68244]: _type = "Task" [ 700.190821] 
env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.210342] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529db365-c459-6c61-7ebd-a719d0010de2, 'name': SearchDatastore_Task, 'duration_secs': 0.011479} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.210959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.210959] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.211113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.211331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.211400] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.211685] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afe0af30-fc1b-46fd-9d25-a8acabd482ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.223010] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.223295] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.225917] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f1ee37d-bb9b-4acb-9ff8-85f00b47362c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.228816] env[68244]: DEBUG nova.compute.manager [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.229588] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61ea254-e324-4027-a10d-bc23c3555ed4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.240346] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 700.240346] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef17a2-2992-ef8c-0dbc-8a05a787c83f" [ 700.240346] env[68244]: _type = "Task" [ 700.240346] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.249491] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef17a2-2992-ef8c-0dbc-8a05a787c83f, 'name': SearchDatastore_Task, 'duration_secs': 0.010059} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.250435] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17382755-81ea-4386-b522-3c8539f44b8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.255665] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 700.255665] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e9287-6733-969b-6beb-fe34c9b3e795" [ 700.255665] env[68244]: _type = "Task" [ 700.255665] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.264053] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e9287-6733-969b-6beb-fe34c9b3e795, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.374277] env[68244]: ERROR nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [req-2cff5e91-95a5-4e6e-8dcd-61c292ee658f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2cff5e91-95a5-4e6e-8dcd-61c292ee658f"}]} [ 700.395370] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 700.416776] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 700.417022] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 700.438087] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 700.461305] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc 
tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 700.512598] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Releasing lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.512945] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Instance network_info: |[{"id": "454e7dd1-22ef-4014-9597-5df4c82d0759", "address": "fa:16:3e:a1:86:1b", "network": {"id": "f5ed040d-c605-4ed1-b2de-d4228912be0d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1108201790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba31a861555b430cab07d044beaf3482", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e7dd1-22", "ovs_interfaceid": "454e7dd1-22ef-4014-9597-5df4c82d0759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 700.513731] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:86:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c883fb98-d172-4510-8cf4-07aafdf771af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '454e7dd1-22ef-4014-9597-5df4c82d0759', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.521454] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Creating folder: Project (ba31a861555b430cab07d044beaf3482). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.521709] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38785327-b42a-46d8-bbcc-fc18de1a1077 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.532209] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Created folder: Project (ba31a861555b430cab07d044beaf3482) in parent group-v558876. [ 700.533346] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Creating folder: Instances. Parent ref: group-v558932. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 700.533346] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-136756b5-542a-4edc-a66d-17695b0cc2f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.540969] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Created folder: Instances in parent group-v558932. [ 700.541334] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.541460] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.541662] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d7b8236-cef3-432d-9592-b55f10e2e819 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.564845] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.564845] env[68244]: value = "task-2780010" [ 700.564845] env[68244]: _type = "Task" [ 700.564845] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.575500] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780010, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.774153] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e9287-6733-969b-6beb-fe34c9b3e795, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.774431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.774691] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 700.774959] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-910ae4c2-3d87-4351-9b19-6ca767d8742d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.784862] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 700.784862] env[68244]: value = "task-2780011" [ 700.784862] env[68244]: _type = "Task" [ 700.784862] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.793212] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.877913] env[68244]: DEBUG nova.compute.manager [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Received event network-changed-4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 700.878068] env[68244]: DEBUG nova.compute.manager [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Refreshing instance network info cache due to event network-changed-4bc0d0f1-ef11-425c-987c-514c9b55015f. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 700.878243] env[68244]: DEBUG oslo_concurrency.lockutils [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] Acquiring lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.878387] env[68244]: DEBUG oslo_concurrency.lockutils [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] Acquired lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.878561] env[68244]: DEBUG nova.network.neutron [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Refreshing network info cache for port 4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.003208] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd5d793-6b97-4788-85a9-0da119550615 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.016237] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f88470-123f-4aae-ba88-26d5c6ca680d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.057600] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd2f91b-fe98-4c21-a887-af51c053fe7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.077810] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa23f24-f970-479f-ab5e-37d5b20d3c88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.090986] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780010, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.099930] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.247631] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.247631] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-201a03a7-9868-4c46-9409-431b9261f2bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.255787] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 701.255787] env[68244]: value = "task-2780012" [ 701.255787] env[68244]: _type = "Task" [ 701.255787] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.264951] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.298939] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481228} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.298939] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 701.299152] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 701.299356] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b9eff40-4f56-4efb-a9d5-f1ab62c42fed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.306709] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 701.306709] env[68244]: value = "task-2780013" [ 701.306709] env[68244]: _type = "Task" [ 701.306709] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.320054] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780013, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.417585] env[68244]: DEBUG nova.compute.manager [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Received event network-changed-454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 701.417585] env[68244]: DEBUG nova.compute.manager [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Refreshing instance network info cache due to event network-changed-454e7dd1-22ef-4014-9597-5df4c82d0759. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 701.417585] env[68244]: DEBUG oslo_concurrency.lockutils [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] Acquiring lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.417585] env[68244]: DEBUG oslo_concurrency.lockutils [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] Acquired lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.417585] env[68244]: DEBUG nova.network.neutron [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Refreshing network info cache for port 454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.583583] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780010, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.653686] env[68244]: DEBUG nova.compute.manager [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.654735] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af69839-ac29-4b50-b8d6-aaefc4e428b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.698817] env[68244]: DEBUG nova.scheduler.client.report [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 38 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 701.699116] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 38 to 39 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 701.699302] env[68244]: DEBUG nova.compute.provider_tree [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.774058] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.817151] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069809} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.817657] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.818932] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a679086-248a-443f-8326-77a89b60b947 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.845389] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.845728] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-956918e8-b7b5-44a9-a5ff-a9092e66c583 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.861111] env[68244]: DEBUG nova.network.neutron [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updated VIF entry in instance network info cache for port 4bc0d0f1-ef11-425c-987c-514c9b55015f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 701.861425] env[68244]: DEBUG nova.network.neutron [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.868271] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 701.868271] env[68244]: value = "task-2780014" [ 701.868271] env[68244]: _type = "Task" [ 701.868271] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.876693] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780014, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.084598] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780010, 'name': CreateVM_Task, 'duration_secs': 1.508418} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.087294] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.087991] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.088195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.088630] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 702.089165] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a59062c8-6ee2-4d1f-a056-132d50cd4647 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.094075] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 702.094075] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eda8f0-0a7f-719e-d22e-887caf881b67" [ 702.094075] env[68244]: _type = "Task" [ 702.094075] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.103155] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eda8f0-0a7f-719e-d22e-887caf881b67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.175148] env[68244]: INFO nova.compute.manager [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] instance snapshotting [ 702.175473] env[68244]: WARNING nova.compute.manager [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 702.179659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526cecf3-0ccd-4d60-9997-fdd384170db6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.203277] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2fca63-31e2-4ede-bcf8-ab7f40dd5cec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.206210] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.318s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.206856] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 702.209556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.341s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.209666] env[68244]: DEBUG nova.objects.instance [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lazy-loading 'resources' on Instance uuid aebd1200-ae52-4537-a677-24b57b581517 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 702.225361] env[68244]: DEBUG nova.network.neutron [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Updated VIF entry in instance network info cache for port 454e7dd1-22ef-4014-9597-5df4c82d0759. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 702.225702] env[68244]: DEBUG nova.network.neutron [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Updating instance_info_cache with network_info: [{"id": "454e7dd1-22ef-4014-9597-5df4c82d0759", "address": "fa:16:3e:a1:86:1b", "network": {"id": "f5ed040d-c605-4ed1-b2de-d4228912be0d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1108201790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba31a861555b430cab07d044beaf3482", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e7dd1-22", "ovs_interfaceid": "454e7dd1-22ef-4014-9597-5df4c82d0759", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.267503] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.364562] env[68244]: DEBUG oslo_concurrency.lockutils [req-9492b5c4-5b1b-444c-b14d-e8328cac42d3 req-9170f5b9-c590-47b1-81d8-c41c9c8c5266 service nova] Releasing lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.378838] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780014, 'name': ReconfigVM_Task, 'duration_secs': 0.290165} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.379368] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.380147] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0afb4220-4216-436f-b019-d06a6accf85b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.388640] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 702.388640] env[68244]: value = "task-2780015" [ 702.388640] env[68244]: _type = "Task" [ 702.388640] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.401581] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780015, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.605832] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eda8f0-0a7f-719e-d22e-887caf881b67, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.606152] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.607023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.607023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.607023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.607023] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.607220] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86f97d3d-f8ba-401d-adcb-272065acbd51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.621370] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.621564] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.622449] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eded0958-ffe6-4cc0-9ca2-faa6b1955611 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.630300] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 702.630300] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e4cf0-0ac8-9dc8-3467-bd41a293af12" [ 702.630300] env[68244]: _type = "Task" [ 702.630300] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.638520] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e4cf0-0ac8-9dc8-3467-bd41a293af12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.717309] env[68244]: DEBUG nova.compute.utils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 702.722994] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 702.723334] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.725953] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 702.727054] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc4cc91f-9298-4b06-bcd7-f3648a8113d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.730197] env[68244]: DEBUG oslo_concurrency.lockutils [req-a67fdb7c-23fa-4d28-a840-290dce16fd93 req-7d4a1999-0e99-475f-865f-ef1ae100be78 service nova] Releasing lock "refresh_cache-59b0dd89-0093-4e50-9428-8db5c7fd429d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.736483] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 702.736483] env[68244]: value = "task-2780016" [ 702.736483] env[68244]: _type = "Task" [ 702.736483] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.753042] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.769329] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780012, 'name': PowerOffVM_Task, 'duration_secs': 1.103958} completed successfully. 
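The CreateSnapshot_Task above is the first step of the server-image request from the ImagesTestJSON run; later entries in this section create a linked clone from that snapshot (CloneVM_Task) and upload its disk. A sketch of that orchestration only, with every vSphere and Glance call stubbed out as hypothetical callables:

```python
def snapshot_instance(create_snapshot, clone_from_snapshot, upload_vmdk,
                      destroy_clone, instance_ref, image_id):
    """Snapshot -> linked clone -> upload flow, with all backend calls stubbed.

    The four callables are placeholders for the real driver operations
    (CreateSnapshot_Task, CloneVM_Task, the image upload, clone cleanup).
    """
    snap_ref = create_snapshot(instance_ref)                  # "Creating Snapshot of the VM instance"
    clone_ref = clone_from_snapshot(instance_ref, snap_ref)   # "Creating linked-clone VM from snapshot"
    try:
        upload_vmdk(clone_ref, image_id)                      # stream the clone's disk to the image service
    finally:
        destroy_clone(clone_ref)                              # the temporary clone is always cleaned up
    return image_id


if __name__ == "__main__":
    print(snapshot_instance(
        create_snapshot=lambda vm: "snapshot-1",
        clone_from_snapshot=lambda vm, snap: "clone-1",
        upload_vmdk=lambda clone, image: None,
        destroy_clone=lambda clone: None,
        instance_ref="vm-03af8758",
        image_id="image-123",
    ))
```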
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.770608] env[68244]: DEBUG nova.policy [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94887627a23e4ff09e0c530ef5b1afb5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4138f768f30b41d1983fc67959dec2e1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 702.774424] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.774746] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.775762] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287db930-8d11-4039-99b2-53f7c9a1e47d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.783411] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.784674] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d69b7c99-77cd-495d-a653-35165cca69a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.845771] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.845994] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.847163] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626 {{(pid=68244) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.847163] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6e397d8-fbb9-47df-aaac-873badbf30b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.860774] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 702.860774] env[68244]: value = "task-2780018" [ 702.860774] env[68244]: _type = "Task" [ 702.860774] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.868362] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.901123] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780015, 'name': Rename_Task, 'duration_secs': 0.152456} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.904503] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.905050] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-969c1f5f-fb09-4057-bb08-110b233589d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.911573] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 702.911573] env[68244]: value = "task-2780019" [ 702.911573] env[68244]: _type = "Task" [ 702.911573] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.922180] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.143606] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529e4cf0-0ac8-9dc8-3467-bd41a293af12, 'name': SearchDatastore_Task, 'duration_secs': 0.022702} completed successfully. 
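The network:attach_external_network policy failure logged a little earlier in this section is expected for a plain member token: that rule is typically admin-only by default, so the build simply proceeds without attaching an external network. A toy sketch of role-based rule evaluation in the same spirit; the rule table and action names are illustrative, not oslo.policy or Nova's real defaults.

```python
# Minimal role-based rule evaluation, in the spirit of the check logged above.
RULES = {
    "network:attach_external_network": {"admin"},           # admin-only in this sketch
    "os_compute_api:servers:create":   {"member", "admin"},
}


def authorize(action, credentials):
    """Return True if any of the caller's roles satisfies the rule for the action."""
    allowed_roles = RULES.get(action, set())
    granted = bool(set(credentials.get("roles", [])) & allowed_roles)
    if not granted:
        print("Policy check for %s failed with credentials %s" % (action, credentials))
    return granted


if __name__ == "__main__":
    creds = {"is_admin": False, "roles": ["reader", "member"],
             "project_id": "4138f768f30b41d1983fc67959dec2e1"}
    assert authorize("os_compute_api:servers:create", creds)
    assert not authorize("network:attach_external_network", creds)
```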
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.152704] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d02c1af3-b0a3-46ed-9c92-177c787684b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.157908] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 703.157908] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52570259-4fc8-c234-4b80-9d16bdfc39fb" [ 703.157908] env[68244]: _type = "Task" [ 703.157908] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.165974] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52570259-4fc8-c234-4b80-9d16bdfc39fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.226174] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 703.257981] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780016, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.334455] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Successfully created port: 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.343059] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ea934a-5219-41dc-99af-aaffa8c57e73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.354057] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64c974e-4ada-46c7-a09c-979c8763b23a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.393289] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b85341-a80d-4188-8f09-bcf5270eb470 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.407833] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226962} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.407990] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.408357] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.408357] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.412043] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb657f9b-e413-4f2a-93da-3d4f0b0be5dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.431603] env[68244]: DEBUG nova.compute.provider_tree [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 703.436196] env[68244]: DEBUG oslo_vmware.api [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780019, 'name': PowerOnVM_Task, 'duration_secs': 0.498517} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.436196] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 703.436196] env[68244]: INFO nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Took 9.07 seconds to spawn the instance on the hypervisor. [ 703.436196] env[68244]: DEBUG nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 703.436466] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7810f44e-1622-40cb-9343-ae035eee1284 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.674998] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52570259-4fc8-c234-4b80-9d16bdfc39fb, 'name': SearchDatastore_Task, 'duration_secs': 0.013608} completed successfully. 
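The "Successfully created port: 1266d6b5-..." entry above comes from the minimal-port path: a bare port is created on the requested network first, and binding details are filled in later once the host is known. A hedged sketch of that first call against the Neutron POST /v2.0/ports API; the endpoint and token values are assumptions, and error handling is omitted.

```python
import json
import urllib.request

NEUTRON = "http://controller:9696"   # assumed Neutron endpoint
TOKEN = "gAAAA..."                   # assumed Keystone token


def create_port_minimal(network_id, device_id, project_id):
    """POST a bare port; fixed IPs and binding details can be updated later."""
    body = {"port": {
        "network_id": network_id,
        "device_id": device_id,
        "device_owner": "compute:nova",
        "project_id": project_id,
    }}
    req = urllib.request.Request(
        NEUTRON + "/v2.0/ports",
        data=json.dumps(body).encode(),
        headers={"Content-Type": "application/json", "X-Auth-Token": TOKEN},
        method="POST",
    )
    with urllib.request.urlopen(req) as resp:   # 201 with the new port on success
        return json.load(resp)["port"]["id"]
```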
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.675287] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.675539] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 59b0dd89-0093-4e50-9428-8db5c7fd429d/59b0dd89-0093-4e50-9428-8db5c7fd429d.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.675788] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5bce63c-7964-4882-8731-a0678e6c01a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.685819] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 703.685819] env[68244]: value = "task-2780024" [ 703.685819] env[68244]: _type = "Task" [ 703.685819] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.697075] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.746840] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780016, 'name': CreateSnapshot_Task, 'duration_secs': 0.525273} completed successfully. 
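The CopyVirtualDisk_Task above clones the cached image VMDK into the instance folder, and the ExtendVirtualDisk_Task entries that follow grow the copy to the flavor's root-disk size (1048576 KB for the 1 GB m1.nano root disk). A small sketch of the path construction and the copy-then-extend order, with both task calls stubbed as placeholder callables:

```python
import posixpath


def ds_path(datastore, *parts):
    """Render a '[datastore] folder/file' style path like the ones logged above."""
    return "[%s] %s" % (datastore, posixpath.join(*parts))


def prepare_root_disk(copy_virtual_disk, extend_virtual_disk,
                      datastore, image_id, instance_uuid, root_gb):
    """Copy the cached image to the instance folder, then grow it to flavor size.

    copy_virtual_disk / extend_virtual_disk stand in for the CopyVirtualDisk_Task
    and ExtendVirtualDisk_Task calls; both are placeholders here.
    """
    src = ds_path(datastore, "devstack-image-cache_base", image_id, image_id + ".vmdk")
    dst = ds_path(datastore, instance_uuid, instance_uuid + ".vmdk")
    copy_virtual_disk(src, dst)                       # "Copying Virtual Disk ... to ..."
    extend_virtual_disk(dst, root_gb * 1024 * 1024)   # size in KB, e.g. 1048576 for 1 GB
    return dst


if __name__ == "__main__":
    print(prepare_root_disk(lambda s, d: None, lambda d, kb: None,
                            "datastore2",
                            "9aa0b4d1-af1b-4141-9ca6-95525b722d7e",
                            "59b0dd89-0093-4e50-9428-8db5c7fd429d", 1))
```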
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.747660] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 703.749566] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd067b05-3f9c-49bb-bf06-ebad91fc0dfc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.963210] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.963818] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.964286] env[68244]: INFO nova.compute.manager [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Took 34.53 seconds to build instance. [ 703.968227] env[68244]: ERROR nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] [req-53c30c2c-5e4f-47d4-9736-0e99c65f8842] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-53c30c2c-5e4f-47d4-9736-0e99c65f8842"}]} [ 703.990027] env[68244]: DEBUG nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 704.003595] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "086dda59-4bd2-4ca2-a758-c120f1271f42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.003970] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.012159] env[68244]: DEBUG nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 704.012572] env[68244]: DEBUG nova.compute.provider_tree [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.026433] env[68244]: DEBUG nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 704.055578] env[68244]: DEBUG 
nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 704.056282] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.056556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.198085] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469775} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.198313] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 59b0dd89-0093-4e50-9428-8db5c7fd429d/59b0dd89-0093-4e50-9428-8db5c7fd429d.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.198488] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.198693] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab47ed25-2a6c-4353-982a-9a1cfac31f91 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.207936] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 704.207936] env[68244]: value = "task-2780025" [ 704.207936] env[68244]: _type = "Task" [ 704.207936] env[68244]: } to complete. 
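The 409 with code placement.concurrent_update seen above is Placement's optimistic-concurrency check: every inventory write carries the resource-provider generation, a stale generation is rejected, and the client re-reads the provider (generation 42 becomes 43 later in this section) and retries. A sketch of that retry loop against the documented REST shape; the endpoint, token, and microversion header are assumptions.

```python
import json
import urllib.error
import urllib.request

PLACEMENT = "http://controller/placement"          # assumed endpoint
HEADERS = {"X-Auth-Token": "gAAAA...",             # assumed token
           "OpenStack-API-Version": "placement 1.26",
           "Content-Type": "application/json"}


def _request(method, path, body=None):
    req = urllib.request.Request(PLACEMENT + path,
                                 data=json.dumps(body).encode() if body else None,
                                 headers=HEADERS, method=method)
    with urllib.request.urlopen(req) as resp:
        return json.load(resp)


def set_inventory(rp_uuid, inventories, attempts=4):
    """PUT the inventory, retrying with a fresh generation on a 409 conflict."""
    for _ in range(attempts):
        current = _request("GET", "/resource_providers/%s/inventories" % rp_uuid)
        payload = {"resource_provider_generation": current["resource_provider_generation"],
                   "inventories": inventories}
        try:
            return _request("PUT", "/resource_providers/%s/inventories" % rp_uuid, payload)
        except urllib.error.HTTPError as exc:
            if exc.code != 409:        # only the generation conflict is retryable
                raise
            # another writer bumped the generation first; re-read it and retry
    raise RuntimeError("gave up after repeated placement.concurrent_update conflicts")
```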
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.218430] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.242256] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.276052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 704.278762] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6dfad4d4-75c1-4a00-91a0-11481b18b001 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.292109] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.292849] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.292849] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.292849] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
704.293051] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.293100] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.293871] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.293871] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.293871] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.294046] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.294432] env[68244]: DEBUG nova.virt.hardware [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.294978] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba400f01-6555-454c-b1ca-8dd6ee0aa56e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.299242] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 704.299242] env[68244]: value = "task-2780026" [ 704.299242] env[68244]: _type = "Task" [ 704.299242] env[68244]: } to complete. 
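The "Build topologies for 1 vcpu(s)" / "Got 1 possible topologies" lines above enumerate the (sockets, cores, threads) factorizations of the vCPU count that fit under the flavor and image maxima (65536 each when unset, as here). A simplified sketch of that enumeration; Nova's nova.virt.hardware applies more filtering and preference sorting than this.

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All (sockets, cores, threads) combinations whose product is the vCPU count."""
    found = []
    for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                     range(1, min(vcpus, max_cores) + 1),
                                     range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(VirtCPUTopology(s, c, t))
    return found


if __name__ == "__main__":
    # m1.nano has a single vCPU, so exactly one topology survives: 1:1:1
    print(possible_topologies(1))     # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_topologies(4)[:3])
```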
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.311158] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6293db6-e4b8-4396-9a6e-d5483966e633 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.325637] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780026, 'name': CloneVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.449921] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.450276] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.450382] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.450603] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.450653] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.450776] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.450974] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f 
tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.451725] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.451725] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.451725] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.451725] env[68244]: DEBUG nova.virt.hardware [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.453607] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242eab7d-ebc9-4393-b619-0713eacc3129 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.464111] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3144307-e151-47a9-996a-f79b275948a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.482072] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f06a22bb-7ce3-43ff-a798-372c29e14882 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.213s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.482674] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:b3:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91539a24-6b83-487e-9863-9e0ff0231dd9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.490640] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.494655] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.496140] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3d73a1a-a43d-4b68-8eaa-41318615f925 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.519056] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.519056] env[68244]: value = "task-2780027" [ 704.519056] env[68244]: _type = "Task" [ 704.519056] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.527609] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780027, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.637768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91f237e-17c1-4a3f-b84c-36509af06621 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.647769] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704c9073-19bc-49d7-b308-10f97058fbf4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.680259] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27825654-ee4d-4a5c-aa07-3d4e64d1886f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.688511] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bc6943-8e82-4e78-9c27-c9e0439afe35 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.702478] env[68244]: DEBUG nova.compute.provider_tree [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 704.718168] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.233768} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.719316] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 704.720166] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d176a42b-3d53-437d-906b-a2838fea88c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.751669] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 59b0dd89-0093-4e50-9428-8db5c7fd429d/59b0dd89-0093-4e50-9428-8db5c7fd429d.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 704.751927] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9626fceb-0389-4580-a6ba-75d18758197d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.772563] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 704.772563] env[68244]: value = "task-2780028" [ 704.772563] env[68244]: _type = "Task" [ 704.772563] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.780427] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.812026] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780026, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.010981] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 705.031437] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780027, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.098977] env[68244]: DEBUG nova.compute.manager [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-vif-plugged-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 705.099295] env[68244]: DEBUG oslo_concurrency.lockutils [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] Acquiring lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.099522] env[68244]: DEBUG oslo_concurrency.lockutils [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.099755] env[68244]: DEBUG oslo_concurrency.lockutils [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.100299] env[68244]: DEBUG nova.compute.manager [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] No waiting events found dispatching network-vif-plugged-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 705.100299] env[68244]: WARNING nova.compute.manager [req-c68e0d25-60a3-47df-9526-d56cc75554d9 req-ecbab8c3-5dd3-441c-bc29-1cda0128ff18 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received unexpected event network-vif-plugged-1266d6b5-36fc-49f9-ab98-42add17e5a24 for instance with vm_state building and task_state spawning. 
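The network-vif-plugged traffic above is Neutron's half of the boot handshake: the compute manager matches incoming external events against per-instance waiters, and an event that arrives before anyone registered for it is logged as unexpected, exactly as in the WARNING line. A minimal sketch of that waiter matching with `threading.Event`; the function names are hypothetical.

```python
import threading
from collections import defaultdict

# One waitable slot per (instance_uuid, event_name); purely illustrative.
_waiters = defaultdict(threading.Event)
_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Called before the operation that will trigger the event (e.g. plugging a VIF)."""
    with _lock:
        return _waiters[(instance_uuid, event_name)]


def dispatch_event(instance_uuid, event_name):
    """Called from the external-event API when Neutron reports e.g. network-vif-plugged."""
    with _lock:
        waiter = _waiters.get((instance_uuid, event_name))
    if waiter is None:
        # mirrors the WARNING above: the event arrived before anyone registered for it
        print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
        return
    waiter.set()


if __name__ == "__main__":
    uuid = "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307"
    dispatch_event(uuid, "network-vif-plugged")        # nobody waiting yet -> warning
    w = prepare_for_event(uuid, "network-vif-plugged")
    dispatch_event(uuid, "network-vif-plugged")
    print("plugged:", w.wait(timeout=1))
```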
[ 705.245893] env[68244]: DEBUG nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 705.246251] env[68244]: DEBUG nova.compute.provider_tree [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 42 to 43 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 705.246416] env[68244]: DEBUG nova.compute.provider_tree [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.284816] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780028, 'name': ReconfigVM_Task, 'duration_secs': 0.362519} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.285244] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 59b0dd89-0093-4e50-9428-8db5c7fd429d/59b0dd89-0093-4e50-9428-8db5c7fd429d.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.285780] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3305ed3f-1e04-4351-ad7c-5834ae62a310 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.296477] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 705.296477] env[68244]: value = "task-2780029" [ 705.296477] env[68244]: _type = "Task" [ 705.296477] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.305835] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780029, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.317224] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780026, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.332987] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Successfully updated port: 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.535944] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780027, 'name': CreateVM_Task, 'duration_secs': 0.886978} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.536137] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.536851] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.537037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.537377] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 705.537657] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-699add4f-190e-4472-8a86-efb171d3c1fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.544961] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 705.544961] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e45193-35a3-0081-cbbe-e7cf22735266" [ 705.544961] 
env[68244]: _type = "Task" [ 705.544961] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.545370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.554487] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e45193-35a3-0081-cbbe-e7cf22735266, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.753356] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.543s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.757098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.191s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.761020] env[68244]: DEBUG nova.objects.instance [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lazy-loading 'resources' on Instance uuid 23f2ad6c-ea98-4a32-a79a-75cec6fc925e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.784703] env[68244]: INFO nova.scheduler.client.report [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Deleted allocations for instance aebd1200-ae52-4537-a677-24b57b581517 [ 705.810201] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780029, 'name': Rename_Task, 'duration_secs': 0.206659} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.814612] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 705.814921] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69bc4ab4-3ec1-4828-9b8a-e381e36b73a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.826266] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780026, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.828565] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 705.828565] env[68244]: value = "task-2780030" [ 705.828565] env[68244]: _type = "Task" [ 705.828565] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.836806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.837045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.837139] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.849071] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780030, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.059694] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e45193-35a3-0081-cbbe-e7cf22735266, 'name': SearchDatastore_Task, 'duration_secs': 0.009889} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.060168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.060672] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.061088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.062127] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.063019] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.063019] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b324fd9-75c0-43b0-bd65-92134abab9b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.074757] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.074957] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.075731] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ace652-f90b-403b-8dc2-88eb241391db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.081968] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 706.081968] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5293521c-53a3-abc4-2897-d00cb7bb0404" [ 706.081968] env[68244]: _type = "Task" [ 706.081968] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.090906] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5293521c-53a3-abc4-2897-d00cb7bb0404, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.296831] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57645167-9825-4718-ae85-d3927748b40b tempest-ServerDiagnosticsTest-49068384 tempest-ServerDiagnosticsTest-49068384-project-member] Lock "aebd1200-ae52-4537-a677-24b57b581517" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.567s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.328827] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780026, 'name': CloneVM_Task, 'duration_secs': 1.944528} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.329173] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Created linked-clone VM from snapshot [ 706.333387] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2947ab44-ca59-40ce-9071-36f628a5d4d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.347431] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Uploading image a31395b3-1f47-46e6-9150-8a7f532927d5 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 706.357252] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780030, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.399066] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 706.399066] env[68244]: value = "vm-558939" [ 706.399066] env[68244]: _type = "VirtualMachine" [ 706.399066] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 706.399399] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4cb14513-9210-4399-9a81-218173116a3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.410610] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.416994] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease: (returnval){ [ 706.416994] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9e3b7-c4a7-f458-3f99-190d42f15b85" [ 706.416994] env[68244]: _type = "HttpNfcLease" [ 706.416994] env[68244]: } obtained for exporting VM: (result){ [ 706.416994] env[68244]: value = "vm-558939" [ 706.416994] env[68244]: _type = "VirtualMachine" [ 706.416994] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 706.416994] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the lease: (returnval){ [ 706.416994] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9e3b7-c4a7-f458-3f99-190d42f15b85" [ 706.416994] env[68244]: _type = "HttpNfcLease" [ 706.416994] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 706.428008] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 706.428008] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9e3b7-c4a7-f458-3f99-190d42f15b85" [ 706.428008] env[68244]: _type = "HttpNfcLease" [ 706.428008] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 706.593485] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5293521c-53a3-abc4-2897-d00cb7bb0404, 'name': SearchDatastore_Task, 'duration_secs': 0.009419} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.595036] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db2a9991-c448-483b-a362-31808bc99c2e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.599977] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 706.599977] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5278f758-d81f-7702-448c-c371f90fb917" [ 706.599977] env[68244]: _type = "Task" [ 706.599977] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.615426] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5278f758-d81f-7702-448c-c371f90fb917, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.615723] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.615902] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.616170] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ca9fe84-56b7-48b5-af1b-0960391b1705 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.621580] env[68244]: DEBUG nova.network.neutron [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.624643] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 706.624643] env[68244]: value = "task-2780033" [ 706.624643] env[68244]: _type = "Task" [ 706.624643] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.632893] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780033, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.791726] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d21c95-4f36-40da-aea4-8c901f50de7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.805923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e1edcc-67a7-4e0c-8b26-f76e21301a5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.850170] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce99e30-f9d1-4a00-a707-57f524e3b5f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.862129] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3875a532-4a94-4e5e-bd79-627c6cb7f9bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.866048] env[68244]: DEBUG oslo_vmware.api [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780030, 'name': PowerOnVM_Task, 'duration_secs': 0.61228} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.866466] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 706.866684] env[68244]: INFO nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 9.95 seconds to spawn the instance on the hypervisor. [ 706.866866] env[68244]: DEBUG nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.868518] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb19520-3462-486a-bfc2-7319d203d330 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.884071] env[68244]: DEBUG nova.compute.provider_tree [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.930856] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 706.930856] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9e3b7-c4a7-f458-3f99-190d42f15b85" [ 706.930856] env[68244]: _type = "HttpNfcLease" [ 706.930856] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 706.931248] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 706.931248] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9e3b7-c4a7-f458-3f99-190d42f15b85" [ 706.931248] env[68244]: _type = "HttpNfcLease" [ 706.931248] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 706.932049] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e238b8-ec06-4aa8-b401-36a1fe016197 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.940391] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk from lease info. 
{{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 706.940593] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 707.125365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.125713] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Instance network_info: |[{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 707.126235] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:e2:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1266d6b5-36fc-49f9-ab98-42add17e5a24', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.135520] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Creating folder: Project (4138f768f30b41d1983fc67959dec2e1). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.141364] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18cd0d8c-fa51-468d-9c61-5d92af42f176 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.144503] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d87f7a0c-9b06-4c94-a5a9-e490c3e95024 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.150194] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780033, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464498} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.151382] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.151382] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.151382] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b962ea54-1555-4425-b1ff-89a0b80954d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.160061] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 707.160061] env[68244]: value = "task-2780035" [ 707.160061] env[68244]: _type = "Task" [ 707.160061] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.171261] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Created folder: Project (4138f768f30b41d1983fc67959dec2e1) in parent group-v558876. [ 707.171261] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Creating folder: Instances. Parent ref: group-v558941. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.171421] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08723637-d8a5-4fa2-98aa-1ff7f2418232 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.186876] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.198462] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Created folder: Instances in parent group-v558941. [ 707.198866] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 707.199452] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.199872] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cbac512-379f-46d3-8a3d-7e78683146a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.241655] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.241655] env[68244]: value = "task-2780037" [ 707.241655] env[68244]: _type = "Task" [ 707.241655] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.254968] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780037, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.388466] env[68244]: DEBUG nova.scheduler.client.report [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.406674] env[68244]: INFO nova.compute.manager [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 34.97 seconds to build instance. 
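The trace above follows the same shape for every vCenter operation (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task): invoke the API, receive a task handle, then poll it through the wait_for_task/_poll_task calls until it reports completion. The sketch below is a minimal, self-contained illustration of that poll-until-done loop only; TaskStub and get_task_info are hypothetical stand-ins, not the oslo.vmware API, whose real entry point is the wait_for_task shown in the oslo_vmware/api.py source tags in these entries.

import time


class TaskStub:
    """Hypothetical stand-in for a vCenter task handle (not the real API)."""

    def __init__(self, polls):
        self._polls = iter(polls)

    def get_task_info(self):
        # Each call yields the next (state, progress) pair, mimicking
        # successive _poll_task round-trips seen in the log above.
        return next(self._polls)


def wait_for_task(task, interval=0.01):
    """Poll a task until it reports success; raise if it reports an error."""
    while True:
        state, progress = task.get_task_info()
        print(f"progress is {progress}%")
        if state == "success":
            return progress
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)


if __name__ == "__main__":
    # Mirrors e.g. task-2780030 (PowerOnVM_Task): 33% -> 89% -> done.
    wait_for_task(TaskStub([("running", 33), ("running", 89), ("success", 100)]))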
[ 707.409502] env[68244]: DEBUG nova.compute.manager [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 707.409746] env[68244]: DEBUG nova.compute.manager [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing instance network info cache due to event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 707.410118] env[68244]: DEBUG oslo_concurrency.lockutils [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.410378] env[68244]: DEBUG oslo_concurrency.lockutils [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.410624] env[68244]: DEBUG nova.network.neutron [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.676849] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068172} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.676849] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.676849] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d04112-f230-40ef-a481-ae3a868e1d16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.710847] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.711319] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d32728b1-1dca-43ae-853a-b0b00906b33a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.731806] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 707.731806] env[68244]: value = "task-2780038" [ 707.731806] env[68244]: _type = "Task" [ 707.731806] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.743853] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.751615] env[68244]: DEBUG nova.compute.manager [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 707.766342] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780037, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.902024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.903471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.652s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.905605] env[68244]: INFO nova.compute.claims [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.909414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ffad5fb-57f0-4e28-aa60-ca716f7079a1 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.691s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.932285] env[68244]: INFO nova.scheduler.client.report [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Deleted allocations for instance 23f2ad6c-ea98-4a32-a79a-75cec6fc925e [ 708.248034] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780038, 'name': ReconfigVM_Task, 'duration_secs': 0.504629} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.251290] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Reconfigured VM instance instance-00000010 to attach disk [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626/d73f87d2-41b3-4396-b5b5-932f8c6bf626.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.251725] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a53cc70-a67e-487f-853f-ec472891e023 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.262704] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780037, 'name': CreateVM_Task, 'duration_secs': 0.546891} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.264585] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 708.264892] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 708.264892] env[68244]: value = "task-2780040" [ 708.264892] env[68244]: _type = "Task" [ 708.264892] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.265540] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.268195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.268195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 708.268195] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7ee716d-4c2f-47f0-a327-d8cb54356e0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.281463] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 708.281463] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5237e67d-42f5-48f3-36a3-8b7054759dd0" [ 708.281463] env[68244]: _type = "Task" [ 708.281463] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.286653] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780040, 'name': Rename_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.287729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.297403] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5237e67d-42f5-48f3-36a3-8b7054759dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.01155} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.301186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.301186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.301186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.301186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.301396] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.301396] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ddc9c64-c043-4fe4-b3bd-fafaa13f6aca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.308562] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 
tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.312028] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.312028] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea87d41b-6860-4be9-a353-d7d9ea364fa5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.316726] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 708.316726] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c1083-9383-7ca0-39f7-028009b6e214" [ 708.316726] env[68244]: _type = "Task" [ 708.316726] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.325091] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c1083-9383-7ca0-39f7-028009b6e214, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.344935] env[68244]: DEBUG nova.network.neutron [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updated VIF entry in instance network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 708.345128] env[68244]: DEBUG nova.network.neutron [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.418112] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.442284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ffce55cb-d31f-490e-8cec-c7642c05693b tempest-ServerExternalEventsTest-33800306 tempest-ServerExternalEventsTest-33800306-project-member] Lock "23f2ad6c-ea98-4a32-a79a-75cec6fc925e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.408s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.779342] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780040, 'name': Rename_Task, 'duration_secs': 0.365854} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.779604] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.779889] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39efa81f-4e96-453f-ab1c-cf049d94a3de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.788973] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 708.788973] env[68244]: value = "task-2780041" [ 708.788973] env[68244]: _type = "Task" [ 708.788973] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.800715] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.831667] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c1083-9383-7ca0-39f7-028009b6e214, 'name': SearchDatastore_Task, 'duration_secs': 0.009826} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.831667] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15098a40-942c-4c75-b93b-918ffe9e8fee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.838787] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 708.838787] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52021ba9-f849-39a7-dbeb-25d0f823bd34" [ 708.838787] env[68244]: _type = "Task" [ 708.838787] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.850754] env[68244]: DEBUG oslo_concurrency.lockutils [req-283e2c97-1214-436c-8441-5473feea0ca2 req-6f4c129e-9638-482e-91f7-42dfa5d03376 service nova] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.851393] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52021ba9-f849-39a7-dbeb-25d0f823bd34, 'name': SearchDatastore_Task, 'duration_secs': 0.01048} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.852543] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.852543] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa7c6967-cd55-47fc-a2f5-db6e8d2e0307/aa7c6967-cd55-47fc-a2f5-db6e8d2e0307.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.852543] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8abb431-cdf4-4990-908c-9abb540b0452 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.859391] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 708.859391] env[68244]: value = "task-2780042" [ 708.859391] env[68244]: _type = "Task" [ 708.859391] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.869017] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780042, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.941922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.222119] env[68244]: DEBUG nova.compute.manager [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.223822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f5b7bc-225c-4bc6-98de-16f25ae28484 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.302360] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.376313] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780042, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.455633] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef848fa-ff6f-467c-8d20-0f82afc1306e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.468078] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f0e76a-83e9-417d-86a0-3983e63289ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.511064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92a5f51-1bae-42a4-b57f-0effa479fe42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.519846] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c681aaad-2519-4206-8dfa-dc03d2fa5975 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.534863] env[68244]: DEBUG nova.compute.provider_tree [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.741802] env[68244]: INFO nova.compute.manager [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] instance snapshotting [ 709.746380] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c659fd-b325-4808-a3b9-4bfb7e68defb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.773095] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5c6aa8-a94a-4558-97f6-50e48b39e629 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.803035] env[68244]: DEBUG oslo_vmware.api [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780041, 'name': PowerOnVM_Task, 'duration_secs': 0.823758} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.803035] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.803035] env[68244]: DEBUG nova.compute.manager [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.803769] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22222da0-53d7-42f6-81a4-7c843372cc86 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.871698] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56287} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.872053] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa7c6967-cd55-47fc-a2f5-db6e8d2e0307/aa7c6967-cd55-47fc-a2f5-db6e8d2e0307.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.872168] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.872369] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e53550c9-60a4-4e4e-95f0-11d2adc293de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.885562] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 709.885562] env[68244]: value = "task-2780043" [ 709.885562] env[68244]: _type = "Task" [ 709.885562] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.903340] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780043, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.042888] env[68244]: DEBUG nova.scheduler.client.report [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.289358] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 710.289358] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-739ba9b1-8a76-4566-8b91-0c517090f55c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.299485] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 710.299485] env[68244]: value = "task-2780045" [ 710.299485] env[68244]: _type = "Task" [ 710.299485] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.323658] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780045, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.324239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.395156] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114443} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.396521] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.396521] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c72c81-3626-493b-b9fc-237df754eb4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.424362] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] aa7c6967-cd55-47fc-a2f5-db6e8d2e0307/aa7c6967-cd55-47fc-a2f5-db6e8d2e0307.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.424705] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17e49038-1a98-4934-8ab8-6a256ac62a35 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.447658] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 710.447658] env[68244]: value = "task-2780046" [ 710.447658] env[68244]: _type = "Task" [ 710.447658] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.458937] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780046, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.548343] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.548928] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 710.552039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.202s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.553537] env[68244]: INFO nova.compute.claims [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.814132] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780045, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.960813] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780046, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.062649] env[68244]: DEBUG nova.compute.utils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.065930] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 711.066145] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.069033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.069033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.069033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.069033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.069239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.071514] env[68244]: INFO nova.compute.manager [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Terminating instance [ 711.110086] env[68244]: DEBUG nova.policy [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ded03c2489d4b9a95b4f10a35eed6ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6539e5ec36f0484f85e61fa8b4ef3f9c', 'project_domain_id': 'default', 'roles': ['reader', 
'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 711.316735] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780045, 'name': CreateSnapshot_Task, 'duration_secs': 0.978915} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.317223] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 711.318105] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c660981-602b-44f1-b14b-6660b6c1e044 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.392318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.392318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.464501] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780046, 'name': ReconfigVM_Task, 'duration_secs': 0.537119} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.465212] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Reconfigured VM instance instance-00000013 to attach disk [datastore2] aa7c6967-cd55-47fc-a2f5-db6e8d2e0307/aa7c6967-cd55-47fc-a2f5-db6e8d2e0307.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.466172] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aadc2cac-a141-430d-acc4-987c6ed6b35b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.473149] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 711.473149] env[68244]: value = "task-2780047" [ 711.473149] env[68244]: _type = "Task" [ 711.473149] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.482925] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780047, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.569563] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 711.580969] env[68244]: DEBUG nova.compute.manager [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 711.581283] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 711.582286] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2f4fac-b08b-450d-91fa-f331b6d2aa1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.596803] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 711.597125] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88838016-d93f-489f-a3ff-71b235f499b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.608034] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 711.608034] env[68244]: value = "task-2780048" [ 711.608034] env[68244]: _type = "Task" [ 711.608034] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.612604] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Successfully created port: 30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.626116] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780048, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.843187] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 711.843852] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8eae155e-60a7-44da-8a57-21f2a4f53949 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.855035] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 711.855035] env[68244]: value = "task-2780049" [ 711.855035] env[68244]: _type = "Task" [ 711.855035] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.867906] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.990993] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780047, 'name': Rename_Task, 'duration_secs': 0.310516} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.992532] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.994386] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64137096-48cd-431b-8dd8-c05376a29df3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.011322] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 712.011322] env[68244]: value = "task-2780050" [ 712.011322] env[68244]: _type = "Task" [ 712.011322] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.026291] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780050, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.036741] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.037157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.118682] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780048, 'name': PowerOffVM_Task, 'duration_secs': 0.277067} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.119046] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 712.119196] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.120069] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97981f15-de92-4704-bea4-faa026b5be34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.167345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad05e3ff-66ca-419c-af52-d9ed73cd0f34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.177409] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097191e8-49bd-4bbf-898e-7ee4d7c8bec5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.215736] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510a5a93-66fa-47a4-8b11-902eb4558c53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.225262] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2306c9-166c-4627-8a92-e7dff4302a1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.244370] 
env[68244]: DEBUG nova.compute.provider_tree [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.302126] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "f113bb6c-f05a-4253-98af-ca827fcbb723" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.302431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.365498] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.523883] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780050, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.591257] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 712.624411] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 712.624698] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.624915] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 712.625133] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.625350] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 712.625519] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 712.625738] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 712.625916] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 712.626097] env[68244]: DEBUG nova.virt.hardware [None 
req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 712.626612] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 712.627035] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 712.628093] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29267c92-29fc-408b-8e17-cdac7fc9eeca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.638042] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a4f3d1-2046-461f-99a9-8adcf9d96e02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.747893] env[68244]: DEBUG nova.scheduler.client.report [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.868581] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.029036] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780050, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.256752] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.257339] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.261486] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.558s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.264681] env[68244]: INFO nova.compute.claims [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.369890] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.527065] env[68244]: DEBUG oslo_vmware.api [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780050, 'name': PowerOnVM_Task, 'duration_secs': 1.489937} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.527065] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.527065] env[68244]: INFO nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Took 9.28 seconds to spawn the instance on the hypervisor. 
[ 713.527065] env[68244]: DEBUG nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.527065] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65776dac-badd-459f-87ce-a8c00160a2cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.584481] env[68244]: DEBUG nova.compute.manager [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Received event network-vif-plugged-30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 713.584718] env[68244]: DEBUG oslo_concurrency.lockutils [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] Acquiring lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.584948] env[68244]: DEBUG oslo_concurrency.lockutils [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.585130] env[68244]: DEBUG oslo_concurrency.lockutils [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.585293] env[68244]: DEBUG nova.compute.manager [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] No waiting events found dispatching network-vif-plugged-30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 713.585467] env[68244]: WARNING nova.compute.manager [req-a6cb2e92-decf-4c98-91e1-a22117bd031a req-ac64dd44-9843-463f-b902-be7067930356 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Received unexpected event network-vif-plugged-30c5e2b9-47e2-45e5-9928-57cb1d81d076 for instance with vm_state building and task_state spawning. 
[ 713.670236] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Successfully updated port: 30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.773306] env[68244]: DEBUG nova.compute.utils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 713.776900] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.776900] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.865274] env[68244]: DEBUG nova.policy [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ded03c2489d4b9a95b4f10a35eed6ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6539e5ec36f0484f85e61fa8b4ef3f9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.873914] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.065049] env[68244]: INFO nova.compute.manager [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Took 37.78 seconds to build instance. 
[ 714.175272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.175430] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.175641] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.277450] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.292681] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Successfully created port: cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.367182] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780049, 'name': CloneVM_Task, 'duration_secs': 2.037966} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.367542] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Created linked-clone VM from snapshot [ 714.368384] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be498f2-c67b-472d-8aee-7785de14fe4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.383803] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Uploading image 22ed5a66-013d-474d-a5cd-af82cf79cd42 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 714.419416] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 714.419416] env[68244]: value = "vm-558945" [ 714.419416] env[68244]: _type = "VirtualMachine" [ 714.419416] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 714.419992] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a6c1db0a-966c-4684-815b-860c2dfafcbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.432909] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lease: (returnval){ [ 714.432909] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52082ffe-ae70-1d64-5e05-f3cbd8655034" [ 714.432909] env[68244]: _type = "HttpNfcLease" [ 714.432909] env[68244]: } obtained for exporting VM: (result){ [ 714.432909] env[68244]: value = "vm-558945" [ 714.432909] env[68244]: _type = "VirtualMachine" [ 714.432909] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 714.433623] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the lease: (returnval){ [ 714.433623] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52082ffe-ae70-1d64-5e05-f3cbd8655034" [ 714.433623] env[68244]: _type = "HttpNfcLease" [ 714.433623] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 714.441581] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 714.441581] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52082ffe-ae70-1d64-5e05-f3cbd8655034" [ 714.441581] env[68244]: _type = "HttpNfcLease" [ 714.441581] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 714.568660] env[68244]: DEBUG oslo_concurrency.lockutils [None req-636c9ff4-1590-4df8-8497-8933e56fcfdc tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.945s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.756080] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.905276] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bb87b8-56a6-4ea7-aac4-f00f389637cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.918294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1bbffb-7f6d-489a-ba5e-86a92f9caeb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.963037] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7acf797-20d0-48e2-9dbe-6d24a39789d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.976308] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 714.976308] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52082ffe-ae70-1d64-5e05-f3cbd8655034" [ 714.976308] env[68244]: _type = "HttpNfcLease" [ 714.976308] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 714.976753] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 714.976753] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52082ffe-ae70-1d64-5e05-f3cbd8655034" [ 714.976753] env[68244]: _type = "HttpNfcLease" [ 714.976753] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 714.978145] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae54aec-d730-4766-b368-0aa43cd815c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.983896] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9569ad71-ed28-426f-a30a-8b0350cd8121 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.007668] env[68244]: DEBUG nova.compute.provider_tree [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.010258] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 715.010258] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk for reading. 
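The lease lifecycle above (ExportVm, lease "is initializing", then "is ready", then the VMDK URL) is the stock vSphere HttpNfcLease export flow used here to stream the linked-clone disk out for the image upload. A sketch under the assumption that session is an existing oslo.vmware VMwareAPISession and vm_ref the VirtualMachine moref; the deviceUrl attribute follows the published HttpNfcLeaseInfo structure:

    from oslo_vmware import vim_util

    # ExportVm returns an HttpNfcLease; wait_for_lease_ready() polls it until
    # it leaves the "initializing" state (the lease lines above).
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)

    # Once ready, the lease info carries one deviceUrl per exported disk; the
    # "Found VMDK URL ... from lease info" entry corresponds to deviceUrl[0].url.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = lease_info.deviceUrl[0].url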
{{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 715.074639] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Updating instance_info_cache with network_info: [{"id": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "address": "fa:16:3e:9e:38:61", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30c5e2b9-47", "ovs_interfaceid": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.075924] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.154631] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-178af485-c778-4502-866b-32e56a7abcca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.300372] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.427797] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.428163] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.428416] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.428702] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.432927] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.432927] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.432927] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.433138] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.433352] env[68244]: DEBUG nova.virt.hardware [None 
req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.433491] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.433716] env[68244]: DEBUG nova.virt.hardware [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.434705] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8c400d-bb08-4cc0-b34a-79f2143d260d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.445381] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2e0c2a-e709-4d3d-b3f0-3edd32e1eaa9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.512207] env[68244]: DEBUG nova.scheduler.client.report [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.560802] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.561411] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.561878] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] d73f87d2-41b3-4396-b5b5-932f8c6bf626 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.562234] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-333e242f-f756-4a1a-85b8-d995938b01ec 
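The nova.virt.hardware lines above reduce to a small search: neither the m1.nano flavor nor the image expresses CPU-topology limits or preferences, so the maxima default to 65536 and the only (sockets, cores, threads) triple whose product equals 1 vCPU is (1, 1, 1). A self-contained illustration of that enumeration (not Nova's actual implementation):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) triple whose product is
        # exactly the vCPU count, subject to the per-dimension maxima.
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log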
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.571251] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 715.571251] env[68244]: value = "task-2780055" [ 715.571251] env[68244]: _type = "Task" [ 715.571251] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.585693] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.588422] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Instance network_info: |[{"id": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "address": "fa:16:3e:9e:38:61", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30c5e2b9-47", "ovs_interfaceid": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 715.592430] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.598140] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:38:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30c5e2b9-47e2-45e5-9928-57cb1d81d076', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.606879] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating folder: Project (6539e5ec36f0484f85e61fa8b4ef3f9c). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.608460] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1dda2b56-7b04-4c8b-9bf2-ff317db00373 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.623033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.626586] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Created folder: Project (6539e5ec36f0484f85e61fa8b4ef3f9c) in parent group-v558876. [ 715.626586] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating folder: Instances. Parent ref: group-v558947. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.626586] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e144f0c-257e-4bb6-b5e7-4e44b4af8bc4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.637154] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.638038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.640501] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Created folder: Instances in parent group-v558947. [ 715.640640] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.640724] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.640916] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10213208-a060-4262-8029-5e695bf35307 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.670165] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.670165] env[68244]: value = "task-2780058" [ 715.670165] env[68244]: _type = "Task" [ 715.670165] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.682242] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780058, 'name': CreateVM_Task} progress is 0%. 
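The "Creating folder: Project (...)" and "Creating folder: Instances" entries map to two nested Folder.CreateFolder calls, after which CreateVM_Task is issued in the innermost folder. A sketch assuming an existing oslo.vmware session plus already-resolved parent_folder_ref, respool_ref and host_ref morefs and a prepared VirtualMachineConfigSpec (config_spec); none of these objects are taken from the log:

    # Nested folder creation; CreateFolder returns the new Folder moref, and
    # the vSphere API reports DuplicateName if the folder already exists.
    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent_folder_ref,
        name='Project (6539e5ec36f0484f85e61fa8b4ef3f9c)')
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')

    # CreateVM_Task builds the VM inside that folder; the session then polls
    # it ("CreateVM_Task progress is 0%" above) until completion.
    task = session.invoke_api(session.vim, 'CreateVM_Task', instances_folder,
                              config=config_spec, pool=respool_ref,
                              host=host_ref)
    session.wait_for_task(task)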
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.973686] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Successfully updated port: cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.019605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.019605] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 716.028524] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.156s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.030662] env[68244]: DEBUG nova.objects.instance [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lazy-loading 'resources' on Instance uuid ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 716.078590] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 716.080032] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad63b6d0-1d67-4bfd-846e-164e46ea1d94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.091104] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 716.091286] env[68244]: ERROR oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk due to incomplete transfer. 
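The ERROR above is the cleanup side of an export lease: the VMDK transfer did not run to completion, so the lease is aborted rather than completed (the HttpNfcLeaseAbort invocation follows just below). A rough sketch of that decision, assuming session and lease as in the earlier export sketch; transfer_complete is a hypothetical flag, and comparing the lease state to a plain string reflects how suds surfaces the enum in these logs:

    from oslo_vmware import vim_util

    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        if transfer_complete:  # hypothetical flag, not from the log
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # Matches "Aborting lease ... due to incomplete transfer" above.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)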
[ 716.091541] env[68244]: DEBUG oslo_vmware.api [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260341} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.091782] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-009f7098-2840-4096-83e7-c7b477121c67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.093540] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.093865] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 716.093865] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.094117] env[68244]: INFO nova.compute.manager [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Took 4.51 seconds to destroy the instance on the hypervisor. [ 716.094254] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.095227] env[68244]: DEBUG nova.compute.manager [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 716.095314] env[68244]: DEBUG nova.network.neutron [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.104155] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52321e0f-d00e-2724-56c1-c97902025590/disk-0.vmdk. 
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 716.105072] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Uploaded image a31395b3-1f47-46e6-9150-8a7f532927d5 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 716.106593] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 716.106902] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1d2586e9-7313-4f6e-8353-6c8a3fbfe305 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.115229] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 716.115229] env[68244]: value = "task-2780059" [ 716.115229] env[68244]: _type = "Task" [ 716.115229] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.125970] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780059, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.181763] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780058, 'name': CreateVM_Task, 'duration_secs': 0.509215} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.182443] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.183124] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.183401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.185275] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 716.185535] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-526c7e5a-8135-4d8e-8d93-85ebcf3ae28e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.192876] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 716.192876] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5233baa2-f480-9238-208b-b5d2ec7bd1a4" [ 716.192876] env[68244]: _type = "Task" [ 716.192876] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.205335] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5233baa2-f480-9238-208b-b5d2ec7bd1a4, 'name': SearchDatastore_Task} progress is 0%. 
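The image-cache lock and the HostDatastoreBrowser.SearchDatastore_Task calls above implement a check-before-copy step: under the per-image lock, the datastore browser is asked whether the cached base VMDK already exists. A rough sketch under the assumption that session and browser_ref (the datastore's HostDatastoreBrowser moref) are available; the search-spec construction follows the usual suds factory usage and is not copied from Nova:

    from oslo_concurrency import lockutils

    IMAGE_ID = '9aa0b4d1-af1b-4141-9ca6-95525b722d7e'
    CACHE_DIR = '[datastore2] devstack-image-cache_base/%s' % IMAGE_ID

    with lockutils.lock(CACHE_DIR):
        spec = session.vim.client.factory.create(
            'ns0:HostDatastoreBrowserSearchSpec')
        spec.matchPattern = ['%s.vmdk' % IMAGE_ID]
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  browser_ref, datastorePath=CACHE_DIR,
                                  searchSpec=spec)
        # A failed task (missing path) or an empty result would be the cue to
        # fetch the image into the cache before copying it to the instance.
        session.wait_for_task(task)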
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.478062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.478062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.478062] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 716.536057] env[68244]: DEBUG nova.compute.utils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 716.536057] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 716.536057] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.593507] env[68244]: DEBUG nova.policy [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c5aa01e2cf846d38e5a5020fe0c8ed7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f7f8ce7041b4550a74a2fb6fe5b4c6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 716.631198] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780059, 'name': Destroy_Task, 'duration_secs': 0.409684} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.631497] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Destroyed the VM [ 716.631937] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 716.632718] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cceb9d12-7326-4801-bf70-2bc5b86fef4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.643644] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 716.643644] env[68244]: value = "task-2780061" [ 716.643644] env[68244]: _type = "Task" [ 716.643644] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.652910] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780061, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.708466] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5233baa2-f480-9238-208b-b5d2ec7bd1a4, 'name': SearchDatastore_Task, 'duration_secs': 0.013274} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.717044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.717044] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.717044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.717044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.717288] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 716.717288] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebf72fde-4094-489e-bc50-8f0a665b8581 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.730646] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 716.730829] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 716.731642] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d636a171-1fef-4a02-a2af-aacdca118bc2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.740712] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 716.740712] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e21df0-1175-040b-a0aa-7d6f96b0d738" [ 716.740712] env[68244]: _type = "Task" [ 716.740712] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.756053] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e21df0-1175-040b-a0aa-7d6f96b0d738, 'name': SearchDatastore_Task, 'duration_secs': 0.013477} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.756956] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6c1d691-ed84-4ac0-90da-b3074e4a7bf2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.765216] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 716.765216] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dc5481-b42d-3b9c-5efd-6b00e955def2" [ 716.765216] env[68244]: _type = "Task" [ 716.765216] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.785841] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dc5481-b42d-3b9c-5efd-6b00e955def2, 'name': SearchDatastore_Task, 'duration_secs': 0.012817} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.785841] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.785841] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0/fe873e92-1481-4c5f-b4ca-90e052bd10c0.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.785841] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5dc44f8-4d7b-41b4-95ae-b121f7b62036 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.791344] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 716.791344] env[68244]: value = "task-2780062" [ 716.791344] env[68244]: _type = "Task" [ 716.791344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.802248] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.819434] env[68244]: DEBUG nova.compute.manager [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Received event network-changed-30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 716.819852] env[68244]: DEBUG nova.compute.manager [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Refreshing instance network info cache due to event network-changed-30c5e2b9-47e2-45e5-9928-57cb1d81d076. 
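The "Copying Virtual Disk ... to ..." entry above is a VirtualDiskManager.CopyVirtualDisk_Task call from the image cache into the instance directory. A sketch where the datastore paths are taken from the log, while session, vdm_ref (the VirtualDiskManager moref) and dc_ref (the datacenter moref) are assumed to be resolved already:

    src = ('[datastore2] devstack-image-cache_base/'
           '9aa0b4d1-af1b-4141-9ca6-95525b722d7e/'
           '9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk')
    dst = ('[datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0/'
           'fe873e92-1481-4c5f-b4ca-90e052bd10c0.vmdk')

    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm_ref,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    # Polled as "CopyVirtualDisk_Task progress is ..%" above until it completes.
    session.wait_for_task(task)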
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 716.819852] env[68244]: DEBUG oslo_concurrency.lockutils [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] Acquiring lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.823076] env[68244]: DEBUG oslo_concurrency.lockutils [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] Acquired lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.823076] env[68244]: DEBUG nova.network.neutron [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Refreshing network info cache for port 30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.869365] env[68244]: DEBUG nova.network.neutron [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.945022] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully created port: 289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.040284] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.066245] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.160728] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780061, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.239294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ba0126-0c4d-48df-b2d3-d3bc85aba505 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.255242] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdc3731-ba9f-475e-8449-2ee73909a469 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.305408] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bc7d91-95b1-4b91-b1f9-5dbac0fe77f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.313145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "f579141b-1fac-4541-99c3-07644a0a358c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.313477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.321290] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully created port: 34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.329150] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b26bc1d-64d8-4df5-aed7-105f8523ccfb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.335772] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780062, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.350697] env[68244]: DEBUG nova.compute.provider_tree [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.366966] env[68244]: DEBUG nova.network.neutron [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Updating instance_info_cache with network_info: [{"id": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "address": "fa:16:3e:71:cc:7e", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab03f48-d9", "ovs_interfaceid": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.373335] env[68244]: INFO nova.compute.manager [-] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Took 1.28 seconds to deallocate network for instance. [ 717.659151] env[68244]: DEBUG oslo_vmware.api [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780061, 'name': RemoveSnapshot_Task, 'duration_secs': 0.560511} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.659814] env[68244]: DEBUG nova.network.neutron [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Updated VIF entry in instance network info cache for port 30c5e2b9-47e2-45e5-9928-57cb1d81d076. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.660104] env[68244]: DEBUG nova.network.neutron [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Updating instance_info_cache with network_info: [{"id": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "address": "fa:16:3e:9e:38:61", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30c5e2b9-47", "ovs_interfaceid": "30c5e2b9-47e2-45e5-9928-57cb1d81d076", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.661334] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 717.661562] env[68244]: INFO nova.compute.manager [None req-8cad34bd-be4f-4fbd-9963-17ddccc117c1 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Took 15.48 seconds to snapshot the instance on the hypervisor. [ 717.727133] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully created port: f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.808159] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780062, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539714} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.808510] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0/fe873e92-1481-4c5f-b4ca-90e052bd10c0.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.808806] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.809131] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bb839d7-6608-4140-9675-a963bde484d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.818560] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 717.818560] env[68244]: value = "task-2780063" [ 717.818560] env[68244]: _type = "Task" [ 717.818560] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.828773] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780063, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.855972] env[68244]: DEBUG nova.scheduler.client.report [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.869802] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.870177] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Instance network_info: |[{"id": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "address": "fa:16:3e:71:cc:7e", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab03f48-d9", "ovs_interfaceid": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.870652] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:cc:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cab03f48-d93f-47d0-b1fa-0aec296b3713', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.879860] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.881402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.881608] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.882123] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-147dbf0c-35d4-4769-a535-fffb9de68e8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.905241] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.905241] env[68244]: value = "task-2780064" [ 717.905241] env[68244]: _type = "Task" [ 717.905241] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.915019] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780064, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.014301] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "f5724973-2349-481c-b2ba-d1287f09c1db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.014541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.053396] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.075610] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.075859] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.076021] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.076222] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.076371] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.076604] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.076859] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.077060] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 718.077247] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be 
tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.077439] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.077631] env[68244]: DEBUG nova.virt.hardware [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.078605] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796fa998-62bc-4115-87d6-fb55c68282c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.090919] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e87c36-2038-4100-b3dc-a12efe8e84e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.164731] env[68244]: DEBUG oslo_concurrency.lockutils [req-4131be9e-5a43-4686-819e-744842ee4591 req-18af7353-0413-4c0b-9e7a-cb51d9fab676 service nova] Releasing lock "refresh_cache-fe873e92-1481-4c5f-b4ca-90e052bd10c0" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.332341] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075052} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.332727] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.333667] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2fb4e5-cca2-4f7e-b97c-112e152eb8c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.359091] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0/fe873e92-1481-4c5f-b4ca-90e052bd10c0.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.359400] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff06ea4d-23da-4c57-ad70-58852c3dfc8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.374429] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.349s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.376774] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.978s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.376979] env[68244]: DEBUG nova.objects.instance [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 718.387515] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 718.387515] env[68244]: value = "task-2780065" [ 718.387515] env[68244]: _type = "Task" [ 718.387515] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.397153] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780065, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.405623] env[68244]: INFO nova.scheduler.client.report [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Deleted allocations for instance ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207 [ 718.421681] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780064, 'name': CreateVM_Task, 'duration_secs': 0.48074} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.421855] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.422471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.422656] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.422968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 718.423271] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58685296-b832-4c54-aed0-fdc59f4b34f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.429797] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 718.429797] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bcf541-0008-6d8d-447e-a2edcc3c11fa" [ 718.429797] env[68244]: _type = "Task" [ 718.429797] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.439773] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bcf541-0008-6d8d-447e-a2edcc3c11fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.667235] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "03af8758-fba3-4173-b998-d9e6b3113f8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.667609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.667952] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.668276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.668583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.671110] env[68244]: INFO nova.compute.manager [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Terminating instance [ 718.908116] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780065, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.916649] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dbd4fa0-3672-4739-8f4b-85a9f3c4a77e tempest-ServerDiagnosticsV248Test-844497569 tempest-ServerDiagnosticsV248Test-844497569-project-member] Lock "ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.629s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.943947] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bcf541-0008-6d8d-447e-a2edcc3c11fa, 'name': SearchDatastore_Task, 'duration_secs': 0.011601} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.945375] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.945986] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.946257] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.946650] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.946787] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.947456] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a1f4065-2b9d-43eb-88d1-e019855290b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.958970] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.959267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.960119] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36963664-5ee8-4b8f-af79-438f2efe86dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.967344] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 718.967344] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b88bf5-d11e-9669-3738-0eb4696eb9c2" [ 718.967344] env[68244]: _type = "Task" [ 718.967344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.979382] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b88bf5-d11e-9669-3738-0eb4696eb9c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.177036] env[68244]: DEBUG nova.compute.manager [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 719.177036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.177036] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd9b7c0-cea5-41a1-93a7-1d9e78d4b073 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.186730] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 719.187049] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-183b2236-7fe2-4564-9cdf-5155b7b77001 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.263816] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 719.264039] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 719.264266] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore2] 03af8758-fba3-4173-b998-d9e6b3113f8c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 719.264564] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12a121d8-c5c9-43ca-b823-40514813cddf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.272691] env[68244]: DEBUG oslo_vmware.api [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 719.272691] env[68244]: value = "task-2780068" [ 719.272691] env[68244]: _type = "Task" [ 719.272691] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.282041] env[68244]: DEBUG oslo_vmware.api [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780068, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.344486] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.344718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.381495] env[68244]: DEBUG nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Received event network-vif-plugged-cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 719.381730] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Acquiring lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.382007] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.382832] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.383191] env[68244]: DEBUG nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] No waiting events found dispatching network-vif-plugged-cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 719.383569] env[68244]: WARNING nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Received unexpected event network-vif-plugged-cab03f48-d93f-47d0-b1fa-0aec296b3713 for instance with vm_state building and task_state spawning. 
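The preceding entries show the external-event path for port cab03f48-d93f-47d0-b1fa-0aec296b3713: Neutron reports network-vif-plugged, the compute manager takes the per-instance "cb607c5e-...-events" lock, finds no registered waiter ("No waiting events found dispatching ..."), and logs the "Received unexpected event" warning before refreshing the network info cache. As a rough, self-contained illustration of that register-then-dispatch pattern only (not the nova.compute.manager code; the EventRegistry name and its methods are invented for this example):

# Hypothetical sketch of a "wait for external event" registry, loosely modeled
# on the pattern visible in the log above. Not the Nova implementation.
import threading


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest before triggering the action that emits the event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when an external event arrives (e.g. network-vif-plugged-<port>)."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # Corresponds to the WARNING in the log: nobody was waiting for this event.
            print(f"unexpected event {event_name} for instance {instance_uuid}")
        else:
            ev.set()


if __name__ == "__main__":
    registry = EventRegistry()
    uuid = "cb607c5e-797d-4e52-9ba4-66113718dacc"
    # No waiter registered: this is the "unexpected event" case seen above.
    registry.dispatch(uuid, "network-vif-plugged-cab03f48-d93f-47d0-b1fa-0aec296b3713")
    # With a waiter registered, dispatch unblocks it instead of warning.
    waiter = registry.prepare(uuid, "network-changed-cab03f48-d93f-47d0-b1fa-0aec296b3713")
    registry.dispatch(uuid, "network-changed-cab03f48-d93f-47d0-b1fa-0aec296b3713")
    print("event delivered:", waiter.is_set())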
[ 719.383935] env[68244]: DEBUG nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Received event network-changed-cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 719.384310] env[68244]: DEBUG nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Refreshing instance network info cache due to event network-changed-cab03f48-d93f-47d0-b1fa-0aec296b3713. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 719.384714] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Acquiring lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.384903] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Acquired lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.385240] env[68244]: DEBUG nova.network.neutron [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Refreshing network info cache for port cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.390063] env[68244]: DEBUG oslo_concurrency.lockutils [None req-452da87d-0c63-47c6-8a2c-c41d02a9e02d tempest-ServersAdmin275Test-1856505953 tempest-ServersAdmin275Test-1856505953-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.390063] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.592s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.391440] env[68244]: INFO nova.compute.claims [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.406951] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780065, 'name': ReconfigVM_Task, 'duration_secs': 0.586442} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.407273] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Reconfigured VM instance instance-00000014 to attach disk [datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0/fe873e92-1481-4c5f-b4ca-90e052bd10c0.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.408447] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a075f202-8583-44e6-b2bf-a4e27ba9d252 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.417024] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 719.417024] env[68244]: value = "task-2780069" [ 719.417024] env[68244]: _type = "Task" [ 719.417024] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.427880] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780069, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.467942] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully updated port: 289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.481388] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b88bf5-d11e-9669-3738-0eb4696eb9c2, 'name': SearchDatastore_Task, 'duration_secs': 0.012146} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.481388] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fafdc7d6-942a-4c59-9c61-5b8679936e24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.488220] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 719.488220] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c078f-2258-9de2-7857-41560cc00be3" [ 719.488220] env[68244]: _type = "Task" [ 719.488220] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.498465] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c078f-2258-9de2-7857-41560cc00be3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.787977] env[68244]: DEBUG oslo_vmware.api [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166548} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.788315] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 719.788553] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 719.788777] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.788991] env[68244]: INFO nova.compute.manager [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Took 0.61 seconds to destroy the instance on the hypervisor. [ 719.789277] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 719.789508] env[68244]: DEBUG nova.compute.manager [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 719.789632] env[68244]: DEBUG nova.network.neutron [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.930231] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780069, 'name': Rename_Task, 'duration_secs': 0.374555} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.930522] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.931267] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4fa8d84-c083-48a3-9aa9-d1a39a67f93b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.940202] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 719.940202] env[68244]: value = "task-2780070" [ 719.940202] env[68244]: _type = "Task" [ 719.940202] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.953433] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.004670] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c078f-2258-9de2-7857-41560cc00be3, 'name': SearchDatastore_Task, 'duration_secs': 0.012683} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.004956] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.005232] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cb607c5e-797d-4e52-9ba4-66113718dacc/cb607c5e-797d-4e52-9ba4-66113718dacc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.005509] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f8ce52d-bc96-4690-9ca0-69803c1aeee6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.014495] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 720.014495] env[68244]: value = "task-2780071" [ 720.014495] env[68244]: _type = "Task" [ 720.014495] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.023811] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.276961] env[68244]: DEBUG nova.network.neutron [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Updated VIF entry in instance network info cache for port cab03f48-d93f-47d0-b1fa-0aec296b3713. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 720.278977] env[68244]: DEBUG nova.network.neutron [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Updating instance_info_cache with network_info: [{"id": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "address": "fa:16:3e:71:cc:7e", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcab03f48-d9", "ovs_interfaceid": "cab03f48-d93f-47d0-b1fa-0aec296b3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.455661] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780070, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.525937] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780071, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.740131] env[68244]: DEBUG nova.network.neutron [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.784682] env[68244]: DEBUG oslo_concurrency.lockutils [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] Releasing lock "refresh_cache-cb607c5e-797d-4e52-9ba4-66113718dacc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.785052] env[68244]: DEBUG nova.compute.manager [req-f2327a2e-f1f0-487e-b886-12fb0f820c74 req-4021a353-6b10-437d-be6f-dd555210a35d service nova] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Received event network-vif-deleted-91539a24-6b83-487e-9863-9e0ff0231dd9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 720.953223] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780070, 'name': PowerOnVM_Task, 'duration_secs': 0.989902} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.956066] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.956066] env[68244]: INFO nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Took 8.36 seconds to spawn the instance on the hypervisor. 
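The entries between 717.8 and 720.9 trace the spawn of instance fe873e92-1481-4c5f-b4ca-90e052bd10c0 as a chain of vCenter tasks, each polled until it reports completion: CopyVirtualDisk_Task (image cache to the instance directory on datastore2), ExtendVirtualDisk_Task (grow the root disk to the flavor's 1 GiB root_gb), ReconfigVM_Task (attach the copied VMDK), Rename_Task, and finally PowerOnVM_Task, after which the manager logs "Took 8.36 seconds to spawn the instance". The minimal sketch below mimics that poll-until-done loop and its "progress is N% ... completed successfully" output; the Task class, poll interval, and task id are invented stand-ins for the oslo.vmware wait_for_task helper referenced in the log, not its real API:

# Minimal, hypothetical sketch of polling asynchronous backend tasks to
# completion, mirroring the progress lines in the log. Not the oslo.vmware code.
import time


class Task:
    """Fake asynchronous task that advances a bit on every poll."""

    def __init__(self, task_id, name, steps=4):
        self.task_id, self.name, self._steps, self._polls = task_id, name, steps, 0

    def poll(self):
        self._polls = min(self._polls + 1, self._steps)
        progress = int(100 * self._polls / self._steps)
        return ("success" if progress == 100 else "running"), progress


def wait_for_task(task, interval=0.05):
    """Block until the task completes, printing progress like the entries above."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "success":
            print(f"Task {task.task_id} ({task.name}) completed in "
                  f"{time.monotonic() - start:.3f}s")
            return
        print(f"Task {task.task_id} ({task.name}) progress is {progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    # The log shows this sequence of tasks for one instance spawn.
    for name in ("CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
                 "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"):
        wait_for_task(Task("task-XXXX", name))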
[ 720.956066] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.956066] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40bba3b-6480-42be-b0d1-35bd993cbab9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.021304] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb032d8d-f5b3-49bb-be62-1c3e0c5fbdbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.034389] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9969a32-a4d9-4415-a197-4a68a6e86c82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.037818] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539551} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.038078] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cb607c5e-797d-4e52-9ba4-66113718dacc/cb607c5e-797d-4e52-9ba4-66113718dacc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.038300] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.039680] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f1dd836-4c54-4910-8891-4954796ce001 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.075125] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75af22c0-b3a3-46d9-8ff9-86a28c8d2f93 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.086038] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 721.086038] env[68244]: value = "task-2780072" [ 721.086038] env[68244]: _type = "Task" [ 721.086038] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.087179] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005b262d-9816-4b30-8b9a-f398748622f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.108912] env[68244]: DEBUG nova.compute.provider_tree [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.112228] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780072, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.243841] env[68244]: INFO nova.compute.manager [-] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Took 1.45 seconds to deallocate network for instance. [ 721.473607] env[68244]: DEBUG nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-plugged-289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 721.473607] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.473737] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.473893] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.474832] env[68244]: DEBUG nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] No waiting events found dispatching network-vif-plugged-289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.474832] env[68244]: WARNING nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received unexpected event network-vif-plugged-289c59b7-4aa5-4a58-a62b-6b55b73f8255 for instance with vm_state building and 
task_state spawning. [ 721.474832] env[68244]: DEBUG nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-changed-289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 721.474832] env[68244]: DEBUG nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing instance network info cache due to event network-changed-289c59b7-4aa5-4a58-a62b-6b55b73f8255. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 721.474832] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Acquiring lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.475323] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Acquired lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.475323] env[68244]: DEBUG nova.network.neutron [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing network info cache for port 289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.482379] env[68244]: INFO nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Took 42.25 seconds to build instance. [ 721.604521] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075175} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.604868] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 721.605745] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afda5e4-2825-4263-b863-df6b206027eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.621875] env[68244]: DEBUG nova.scheduler.client.report [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.633961] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] cb607c5e-797d-4e52-9ba4-66113718dacc/cb607c5e-797d-4e52-9ba4-66113718dacc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.634796] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deec13d8-b3ec-4b9f-ac52-c5adade184d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.668040] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 721.668040] env[68244]: value = "task-2780073" [ 721.668040] env[68244]: _type = "Task" [ 721.668040] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.676775] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780073, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.752499] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.985024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.855s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.025268] env[68244]: DEBUG nova.network.neutron [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.124501] env[68244]: DEBUG nova.network.neutron [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.137401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.137756] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 722.144188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.292s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.145747] env[68244]: INFO nova.compute.claims [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.178289] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780073, 'name': ReconfigVM_Task, 'duration_secs': 0.361361} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.180331] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Reconfigured VM instance instance-00000015 to attach disk [datastore2] cb607c5e-797d-4e52-9ba4-66113718dacc/cb607c5e-797d-4e52-9ba4-66113718dacc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.180331] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-714575e6-3e65-42d9-9cd9-17a05da3ceaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.191827] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 722.191827] env[68244]: value = "task-2780074" [ 722.191827] env[68244]: _type = "Task" [ 722.191827] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.201553] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780074, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.488043] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.574701] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully updated port: 34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 722.630959] env[68244]: DEBUG oslo_concurrency.lockutils [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] Releasing lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.631353] env[68244]: DEBUG nova.compute.manager [req-2258576a-7782-4cff-9ff8-381cf9f862c4 req-62d6f73d-4690-4e8c-b75e-3cc231842e84 service nova] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Received event network-vif-deleted-a53c4339-1f00-4439-b65a-0583dcb486a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 722.658026] env[68244]: DEBUG nova.compute.utils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.663867] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 722.664648] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.702886] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780074, 'name': Rename_Task, 'duration_secs': 0.170309} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.703593] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 722.703878] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbf8bd50-286d-42cd-b2db-899442db0d3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.711193] env[68244]: DEBUG nova.compute.manager [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-plugged-34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 722.711193] env[68244]: DEBUG oslo_concurrency.lockutils [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.711193] env[68244]: DEBUG oslo_concurrency.lockutils [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.711193] env[68244]: DEBUG oslo_concurrency.lockutils [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.711193] env[68244]: DEBUG nova.compute.manager [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] No waiting events found dispatching network-vif-plugged-34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 722.711487] env[68244]: WARNING nova.compute.manager [req-50b97953-c59f-43cb-a28e-ea01a72dfa92 req-5a9ff1fa-87de-4188-bc0e-d6123a0ab2db service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received unexpected event network-vif-plugged-34e93d11-e4ad-463d-9400-d65db1e5a394 for instance with vm_state building and task_state spawning. [ 722.714603] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 722.714603] env[68244]: value = "task-2780075" [ 722.714603] env[68244]: _type = "Task" [ 722.714603] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.724507] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780075, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.754024] env[68244]: DEBUG nova.policy [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7dc24a60ae364e9a991bac51bd3ba9d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec5340c2b2a440d0ad5a75fd694ad71a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 722.935146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "828865d7-d06a-4683-9149-987e6d9efbd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.935146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.017565] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.164722] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 723.239643] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780075, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.297181] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Successfully created port: 817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.733144] env[68244]: DEBUG oslo_vmware.api [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780075, 'name': PowerOnVM_Task, 'duration_secs': 0.530135} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.733300] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.733500] env[68244]: INFO nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Took 8.43 seconds to spawn the instance on the hypervisor. [ 723.733667] env[68244]: DEBUG nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.734571] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05518732-bdbe-43f8-9f1c-1e966ede9c26 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.773621] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4d7d86-5625-4d8e-87d6-14f2714baa3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.785469] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63215c4-caa1-4b08-b313-2786aa5eb825 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.819932] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667e25bb-ac5a-4807-a10d-a138f94dae34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.829349] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97960ce9-648b-4d70-93b7-224260899a70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.845224] env[68244]: DEBUG nova.compute.provider_tree [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed in ProviderTree for provider: 
b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.179178] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 724.209116] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.209393] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.209540] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.209849] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.209931] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.210380] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.210825] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 724.210917] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.211022] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.211190] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.211368] env[68244]: DEBUG nova.virt.hardware [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.212518] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82a999c-d25e-4cb4-8c9a-b81591ec0f24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.229051] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d926e1a-31b2-4e1f-8686-93b1d70edbe4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.259045] env[68244]: INFO nova.compute.manager [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Took 43.93 seconds to build instance. 
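The repeated "Task: {...} progress is N%" and "... completed successfully" records above come from polling a vCenter task until it settles. A minimal sketch of that poll-and-wait pattern; get_task_info is a hypothetical callable standing in for the real oslo.vmware session call, not the driver's actual implementation:

```python
import time

# Generic poll loop behind the "_poll_task ... progress is N%" lines above.
def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()              # e.g. {"state": "running", "progress": 87}
        if info["state"] == "success":
            return info                     # "... completed successfully."
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)                # next "progress is N%" record
```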
[ 724.348604] env[68244]: DEBUG nova.scheduler.client.report [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.765281] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c20ff2a-ebe6-497f-8e05-5cc1d040a246 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.593s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.783923] env[68244]: DEBUG nova.compute.manager [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-changed-34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 724.784132] env[68244]: DEBUG nova.compute.manager [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing instance network info cache due to event network-changed-34e93d11-e4ad-463d-9400-d65db1e5a394. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 724.784501] env[68244]: DEBUG oslo_concurrency.lockutils [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] Acquiring lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.784632] env[68244]: DEBUG oslo_concurrency.lockutils [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] Acquired lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.784789] env[68244]: DEBUG nova.network.neutron [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing network info cache for port 34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.857415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.717s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.858432] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.866189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.730s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.866447] env[68244]: DEBUG nova.objects.instance [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lazy-loading 'resources' on Instance uuid c662b964-abc9-41af-85fd-ea1a540e1e23 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 724.912099] env[68244]: DEBUG nova.compute.manager [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-plugged-f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 724.912237] env[68244]: DEBUG oslo_concurrency.lockutils [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.913903] env[68244]: DEBUG oslo_concurrency.lockutils [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.913903] env[68244]: DEBUG oslo_concurrency.lockutils [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.913903] env[68244]: DEBUG nova.compute.manager [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] No waiting events found dispatching network-vif-plugged-f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 724.913903] env[68244]: WARNING nova.compute.manager [req-0d8a2a6e-8f3f-4aa0-9edf-6b106f714894 req-239a1e1d-a189-4fc5-9b51-1dea382ab3ba service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received unexpected event network-vif-plugged-f80f51a3-7747-4204-ae15-fbe07a3765ea for instance with vm_state building and task_state spawning. 
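The 'Acquiring lock ... / acquired ... waited Ns / released ... held Ns' records above (for "compute_resources" and the per-instance "<uuid>-events" locks) are emitted by oslo.concurrency's synchronized wrapper. A sketch of that pattern under the assumption of a plain in-process semaphore; the decorator is real oslo.concurrency API, the function body is a placeholder:

```python
from oslo_concurrency import lockutils

# Serializes callers on the named semaphore; the wrapper logs the
# acquire/waited/held lines seen above.
@lockutils.synchronized("compute_resources")
def update_usage(instance):
    # resource tracker bookkeeping runs while the lock is held; long
    # "waited 29.730s" values come from other threads queued here
    pass
```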
[ 724.977738] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Successfully updated port: f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.143630] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.144240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.144240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.144372] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.144531] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.150363] env[68244]: INFO nova.compute.manager [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Terminating instance [ 725.229902] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Successfully updated port: 817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.236742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] 
Acquiring lock "cb607c5e-797d-4e52-9ba4-66113718dacc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.237131] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.237463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.237705] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.237945] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.240938] env[68244]: INFO nova.compute.manager [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Terminating instance [ 725.268248] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 725.360526] env[68244]: DEBUG nova.network.neutron [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.372276] env[68244]: DEBUG nova.compute.utils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 725.375130] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.375601] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.482547] env[68244]: DEBUG nova.policy [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dce40a2ad8a46dcaf896e92a010e6ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6004a8c148a4dc3a3a370d043407f3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.484326] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.555945] env[68244]: DEBUG nova.network.neutron [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.656987] env[68244]: DEBUG nova.compute.manager [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 725.656987] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.656987] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f90cee-08d5-47b4-adab-aa2df90caa65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.667787] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.668094] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd854644-1506-46cc-8607-6cb7fbc29aaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.684217] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 725.684217] env[68244]: value = "task-2780076" [ 725.684217] env[68244]: _type = "Task" [ 725.684217] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.699830] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.737339] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.738380] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.738683] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.744895] env[68244]: DEBUG nova.compute.manager [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 725.745123] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.747812] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9113f11c-60f1-4b57-924e-472aa95ef8a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.764389] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.768814] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56f1fe71-fc83-4c7c-b8a5-b76d63d248d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.783555] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 725.783555] env[68244]: value = "task-2780077" [ 725.783555] env[68244]: _type = "Task" [ 725.783555] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.798124] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780077, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.807611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.907486] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.987230] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Successfully created port: 642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.045487] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 726.046509] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cd56e0-1574-4aa7-b4d9-d8cddb36c416 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.056498] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 726.056688] env[68244]: ERROR oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk due to incomplete transfer. 
[ 726.056919] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-96af6111-4d08-4e8e-8301-5049efd7a788 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.060728] env[68244]: DEBUG oslo_concurrency.lockutils [req-292ffeeb-3dab-4b33-9de1-c3c59a6d677e req-980df57a-6419-46c8-9d79-0371ec7901d8 service nova] Releasing lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.060728] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.061234] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.065111] env[68244]: DEBUG oslo_vmware.rw_handles [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524bb25a-3f2e-cc64-cd4b-b4327c735ec0/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 726.065886] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Uploaded image 22ed5a66-013d-474d-a5cd-af82cf79cd42 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 726.067469] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 726.068082] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d93a1d4f-cc6b-442e-9ecb-f7c0d47f131f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.079346] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 726.079346] env[68244]: value = "task-2780078" [ 726.079346] env[68244]: _type = "Task" [ 726.079346] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.092568] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6880b5-39cc-415a-8be6-e2489da59148 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.095330] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780078, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.101186] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0d0a5f-a6dc-46c8-a456-115751433e2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.136914] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2a9ba3-4a73-444b-b596-5f7e5a9b5092 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.147252] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3785af17-6678-4c18-a8c9-c7268a2298c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.162728] env[68244]: DEBUG nova.compute.provider_tree [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.196824] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780076, 'name': PowerOffVM_Task, 'duration_secs': 0.290632} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.197140] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 726.197332] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 726.197634] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc022ecc-ef4e-421d-92be-0dd91dea0b9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.268634] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 726.269249] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 726.269523] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleting the datastore file [datastore2] fe873e92-1481-4c5f-b4ca-90e052bd10c0 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.269942] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f39bac7d-7ae1-4ae2-85f0-ff856997a86a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.280263] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 726.280263] env[68244]: value = "task-2780080" [ 726.280263] env[68244]: _type = "Task" [ 726.280263] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.288094] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.295092] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.302951] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780077, 'name': PowerOffVM_Task, 'duration_secs': 0.231979} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.304206] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 726.304284] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 726.305700] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-146f73bc-2336-45f1-a985-2f37bb3eac8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.346022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.346022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.377372] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 726.377676] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 726.377885] 
env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleting the datastore file [datastore2] cb607c5e-797d-4e52-9ba4-66113718dacc {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.378184] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-535035d3-8099-44d9-bdf2-b26c7da3cb1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.386203] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 726.386203] env[68244]: value = "task-2780082" [ 726.386203] env[68244]: _type = "Task" [ 726.386203] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.396241] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.593673] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780078, 'name': Destroy_Task, 'duration_secs': 0.427115} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.594232] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Destroyed the VM [ 726.594581] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 726.595897] env[68244]: DEBUG nova.network.neutron [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [{"id": "817af294-3a0f-4ead-9a86-ed1f635dc303", "address": "fa:16:3e:be:f2:fe", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap817af294-3a", "ovs_interfaceid": "817af294-3a0f-4ead-9a86-ed1f635dc303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.597962] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ccdfd683-394e-4443-ac56-d4c03fd99abc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.608689] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 726.608689] env[68244]: value = "task-2780083" [ 726.608689] env[68244]: _type = "Task" [ 726.608689] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.621392] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780083, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.623207] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.664841] env[68244]: DEBUG nova.scheduler.client.report [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.792216] env[68244]: DEBUG oslo_vmware.api [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146073} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.793080] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.793418] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 726.793756] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.794260] env[68244]: INFO nova.compute.manager [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 726.794693] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.795232] env[68244]: DEBUG nova.compute.manager [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 726.795665] env[68244]: DEBUG nova.network.neutron [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.903724] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.906008] env[68244]: DEBUG oslo_vmware.api [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142803} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.906588] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.906859] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 726.907074] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.907851] env[68244]: INFO nova.compute.manager [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 726.907851] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.907851] env[68244]: DEBUG nova.compute.manager [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 726.907978] env[68244]: DEBUG nova.network.neutron [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.941666] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.941904] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.942068] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.942248] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.942388] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.942543] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.942747] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.942925] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.943105] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.943264] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.943445] env[68244]: DEBUG nova.virt.hardware [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.944330] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df10aaa0-0c4b-4680-8f33-cac304c0531d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.954777] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f763e18-58b4-4703-9d19-a89cab3ae214 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.101125] env[68244]: DEBUG nova.network.neutron [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [{"id": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "address": "fa:16:3e:4c:ed:04", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289c59b7-4a", "ovs_interfaceid": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"34e93d11-e4ad-463d-9400-d65db1e5a394", "address": "fa:16:3e:5d:20:ab", "network": {"id": "8f99087f-d6b6-4722-b1c3-9c2df51ef236", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-803038205", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e93d11-e4", "ovs_interfaceid": "34e93d11-e4ad-463d-9400-d65db1e5a394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "address": "fa:16:3e:77:54:ab", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf80f51a3-77", "ovs_interfaceid": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.102615] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.102892] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Instance network_info: |[{"id": "817af294-3a0f-4ead-9a86-ed1f635dc303", "address": "fa:16:3e:be:f2:fe", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap817af294-3a", "ovs_interfaceid": "817af294-3a0f-4ead-9a86-ed1f635dc303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.103496] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:f2:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '817af294-3a0f-4ead-9a86-ed1f635dc303', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.111550] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Creating folder: Project (ec5340c2b2a440d0ad5a75fd694ad71a). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.111550] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fadd1d3-f2f7-4762-8bc9-b889f2859814 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.124482] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780083, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.129755] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Created folder: Project (ec5340c2b2a440d0ad5a75fd694ad71a) in parent group-v558876. [ 727.130609] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Creating folder: Instances. Parent ref: group-v558951. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.130609] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e4c3abe-fb55-429c-87ef-0ba45a85f7c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.143930] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Created folder: Instances in parent group-v558951. [ 727.144204] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.144480] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.144707] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-571887a7-7119-449f-a1e9-8a7723e31f21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.169915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.308s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.172609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.526s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.174324] env[68244]: INFO nova.compute.claims [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.181201] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.181201] env[68244]: value = "task-2780086" [ 727.181201] env[68244]: _type = "Task" [ 727.181201] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.193956] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780086, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.204617] env[68244]: INFO nova.scheduler.client.report [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Deleted allocations for instance c662b964-abc9-41af-85fd-ea1a540e1e23 [ 727.293281] env[68244]: DEBUG nova.compute.manager [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received event network-vif-plugged-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.293281] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Acquiring lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.293646] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.293694] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.293883] env[68244]: DEBUG nova.compute.manager [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] No waiting events found dispatching network-vif-plugged-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.294090] env[68244]: WARNING nova.compute.manager [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received unexpected event network-vif-plugged-817af294-3a0f-4ead-9a86-ed1f635dc303 for instance with vm_state building and task_state spawning. [ 727.294371] env[68244]: DEBUG nova.compute.manager [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.294521] env[68244]: DEBUG nova.compute.manager [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing instance network info cache due to event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 727.294643] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Acquiring lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.294796] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Acquired lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.295076] env[68244]: DEBUG nova.network.neutron [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.350498] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.350719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.541479] env[68244]: DEBUG nova.compute.manager [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-changed-f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.541479] env[68244]: DEBUG nova.compute.manager [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing instance network info cache due to event network-changed-f80f51a3-7747-4204-ae15-fbe07a3765ea. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 727.541479] env[68244]: DEBUG oslo_concurrency.lockutils [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] Acquiring lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.562154] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Successfully updated port: 642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.604744] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.605189] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance network_info: |[{"id": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "address": "fa:16:3e:4c:ed:04", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289c59b7-4a", "ovs_interfaceid": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "34e93d11-e4ad-463d-9400-d65db1e5a394", "address": "fa:16:3e:5d:20:ab", "network": {"id": "8f99087f-d6b6-4722-b1c3-9c2df51ef236", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-803038205", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap34e93d11-e4", "ovs_interfaceid": "34e93d11-e4ad-463d-9400-d65db1e5a394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "address": "fa:16:3e:77:54:ab", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf80f51a3-77", "ovs_interfaceid": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.605804] env[68244]: DEBUG oslo_concurrency.lockutils [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] Acquired lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.605995] env[68244]: DEBUG nova.network.neutron [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Refreshing network info cache for port f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.607372] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:ed:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4aa1eda7-48b9-4fa2-af0b-94c718313af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '289c59b7-4aa5-4a58-a62b-6b55b73f8255', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:20:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '877ed63d-906e-4bd5-a1fc-7e82d172d41e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34e93d11-e4ad-463d-9400-d65db1e5a394', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:54:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4aa1eda7-48b9-4fa2-af0b-94c718313af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f80f51a3-7747-4204-ae15-fbe07a3765ea', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.621607] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be 
tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Creating folder: Project (3f7f8ce7041b4550a74a2fb6fe5b4c6b). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.625402] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-beec6d08-d9cf-4905-a62c-2e142e57db16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.638337] env[68244]: DEBUG oslo_vmware.api [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780083, 'name': RemoveSnapshot_Task, 'duration_secs': 0.693692} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.638610] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 727.638847] env[68244]: INFO nova.compute.manager [None req-92996dd9-937f-4d57-ab32-9379c6264a54 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 17.89 seconds to snapshot the instance on the hypervisor. [ 727.644877] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Created folder: Project (3f7f8ce7041b4550a74a2fb6fe5b4c6b) in parent group-v558876. [ 727.644877] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Creating folder: Instances. Parent ref: group-v558954. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.644877] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f364353-59db-47a6-b6b4-4acd248fc5b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.655768] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Created folder: Instances in parent group-v558954. [ 727.656030] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.656229] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.656471] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75411526-edf7-43ae-ad60-3b060f1945c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.692459] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.692459] env[68244]: value = "task-2780089" [ 727.692459] env[68244]: _type = "Task" [ 727.692459] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.696124] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780086, 'name': CreateVM_Task, 'duration_secs': 0.372087} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.699648] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 727.700623] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.700757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.701075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 727.701652] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf00731-3952-4656-a8bf-3b3efc80d641 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.707097] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780089, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.708570] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 727.708570] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52159305-224a-7b2f-95ef-65858bbd1190" [ 727.708570] env[68244]: _type = "Task" [ 727.708570] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.715157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8ec9673-7440-465e-aaaa-0c518231226c tempest-ServersAdmin275Test-965074400 tempest-ServersAdmin275Test-965074400-project-member] Lock "c662b964-abc9-41af-85fd-ea1a540e1e23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.905s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.724176] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52159305-224a-7b2f-95ef-65858bbd1190, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.724393] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.724707] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.724847] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.724993] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.725194] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.725444] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f05bdeca-eb79-4dd9-8834-7941ee52021d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.741329] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.741519] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.742287] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9db4c7b-2fd6-423f-922c-8319e2668154 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.748938] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 727.748938] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528553f2-53f6-5237-7e5e-608fdd7789da" [ 727.748938] env[68244]: _type = "Task" [ 727.748938] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.759047] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528553f2-53f6-5237-7e5e-608fdd7789da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.860563] env[68244]: DEBUG nova.network.neutron [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updated VIF entry in instance network info cache for port f80f51a3-7747-4204-ae15-fbe07a3765ea. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.860940] env[68244]: DEBUG nova.network.neutron [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [{"id": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "address": "fa:16:3e:4c:ed:04", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289c59b7-4a", "ovs_interfaceid": "289c59b7-4aa5-4a58-a62b-6b55b73f8255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "34e93d11-e4ad-463d-9400-d65db1e5a394", "address": "fa:16:3e:5d:20:ab", "network": {"id": "8f99087f-d6b6-4722-b1c3-9c2df51ef236", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-803038205", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e93d11-e4", "ovs_interfaceid": "34e93d11-e4ad-463d-9400-d65db1e5a394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "address": "fa:16:3e:77:54:ab", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", 
"segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf80f51a3-77", "ovs_interfaceid": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.945565] env[68244]: DEBUG nova.network.neutron [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.952448] env[68244]: DEBUG nova.network.neutron [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.062220] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.062379] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.062575] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.182966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "085b318d-e704-46f9-89a6-679b8aa49f85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.183209] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.205406] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780089, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.259583] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528553f2-53f6-5237-7e5e-608fdd7789da, 'name': SearchDatastore_Task, 'duration_secs': 0.011461} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.262972] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0005e46-c5d2-4e24-9844-a57fcb86004e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.269679] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 728.269679] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cbe0f3-6f26-db41-e5b6-dfd95192b805" [ 728.269679] env[68244]: _type = "Task" [ 728.269679] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.282237] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cbe0f3-6f26-db41-e5b6-dfd95192b805, 'name': SearchDatastore_Task, 'duration_secs': 0.010336} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.282488] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.282789] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1/8f0e60c8-7029-4dd5-b615-aa2b5d115aa1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 728.282995] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9fa2718-4383-428c-842b-5e9372dd26c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.294168] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 728.294168] env[68244]: value = "task-2780090" [ 728.294168] env[68244]: _type = "Task" [ 728.294168] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.305065] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.363229] env[68244]: DEBUG oslo_concurrency.lockutils [req-2a81e624-a122-4b80-823a-aae16fc41cc7 req-14b76620-f15b-4f99-acb6-6202daa2aec9 service nova] Releasing lock "refresh_cache-fd4d5494-042b-457e-a826-dee4d87c0032" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.415126] env[68244]: DEBUG nova.network.neutron [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updated VIF entry in instance network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 728.415475] env[68244]: DEBUG nova.network.neutron [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [{"id": "817af294-3a0f-4ead-9a86-ed1f635dc303", "address": "fa:16:3e:be:f2:fe", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap817af294-3a", "ovs_interfaceid": "817af294-3a0f-4ead-9a86-ed1f635dc303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.449430] env[68244]: INFO nova.compute.manager [-] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Took 1.65 seconds to deallocate network for instance. [ 728.456896] env[68244]: INFO nova.compute.manager [-] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Took 1.55 seconds to deallocate network for instance. [ 728.657609] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.712069] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780089, 'name': CreateVM_Task, 'duration_secs': 0.554309} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.712252] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.713238] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.713397] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.713717] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 728.714025] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b155eb-2c4b-4858-a76e-1a70197aefeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.722653] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 728.722653] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d9c0ac-02a6-82a0-6c9b-f85dde5bde66" [ 728.722653] env[68244]: _type = "Task" [ 728.722653] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.738229] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d9c0ac-02a6-82a0-6c9b-f85dde5bde66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.751549] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576c645e-b48f-4a8d-b37a-f9cc06c05e6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.761084] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c228c80-9b39-411c-8160-c6e49d737d86 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.537199] env[68244]: DEBUG oslo_concurrency.lockutils [req-681a8ce5-4d54-4ce6-8ee7-b2d7f24c80e2 req-a2036a47-5201-4099-992d-4aa8b1bd4c71 service nova] Releasing lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.538239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.538995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.551659] env[68244]: DEBUG nova.compute.manager [req-73854c79-14b3-419b-ab56-90747c600e4a req-9440d3c8-aa81-42c0-9ec1-0eca0c5211d6 service nova] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Received event network-vif-deleted-30c5e2b9-47e2-45e5-9928-57cb1d81d076 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.551659] env[68244]: DEBUG nova.compute.manager [req-73854c79-14b3-419b-ab56-90747c600e4a req-9440d3c8-aa81-42c0-9ec1-0eca0c5211d6 service nova] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Received event network-vif-deleted-cab03f48-d93f-47d0-b1fa-0aec296b3713 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.554156] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6527f463-82bf-46ee-9d01-6461d22e958e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.556864] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "6915d271-8346-41b5-a75b-2188fd3b57d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.557081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.565250] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512623} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.569281] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1/8f0e60c8-7029-4dd5-b615-aa2b5d115aa1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 729.569495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.572021] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d9c0ac-02a6-82a0-6c9b-f85dde5bde66, 'name': SearchDatastore_Task, 'duration_secs': 0.056861} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.572021] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36e6080e-bfff-4365-8ac8-24f4623b9667 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.572222] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.572534] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 729.572762] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.572902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.573082] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 729.574280] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15bdaf4-da71-4ee0-96a4-8436fdc5e95c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.577960] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f777cba-95e1-4de9-9cfd-a334efc989e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.582767] env[68244]: DEBUG nova.compute.manager [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Received event network-vif-plugged-642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.582977] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Acquiring lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.583197] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.583357] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.583514] env[68244]: DEBUG nova.compute.manager [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] No waiting events found dispatching network-vif-plugged-642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.583734] env[68244]: WARNING nova.compute.manager [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Received unexpected event network-vif-plugged-642dfe35-bea6-4d0d-a44f-9eac38934526 for instance with vm_state building and task_state spawning. [ 729.583813] env[68244]: DEBUG nova.compute.manager [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Received event network-changed-642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.583949] env[68244]: DEBUG nova.compute.manager [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Refreshing instance network info cache due to event network-changed-642dfe35-bea6-4d0d-a44f-9eac38934526. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 729.584123] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Acquiring lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.593706] env[68244]: DEBUG nova.compute.provider_tree [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.599674] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 729.599674] env[68244]: value = "task-2780091" [ 729.599674] env[68244]: _type = "Task" [ 729.599674] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.599674] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.601596] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.605439] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b449354-4706-4742-ade7-fdc95aeede57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.616228] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 729.616228] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5230e219-781a-ed6d-885b-a5760c87c90b" [ 729.616228] env[68244]: _type = "Task" [ 729.616228] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.619419] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780091, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.631698] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5230e219-781a-ed6d-885b-a5760c87c90b, 'name': SearchDatastore_Task, 'duration_secs': 0.010666} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.632482] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-644c29d6-1af5-4871-b980-75c59681f388 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.639351] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 729.639351] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521311f0-6442-3fd4-4f9e-64b8e148fa84" [ 729.639351] env[68244]: _type = "Task" [ 729.639351] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.649565] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521311f0-6442-3fd4-4f9e-64b8e148fa84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.659718] env[68244]: DEBUG nova.network.neutron [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Updating instance_info_cache with network_info: [{"id": "642dfe35-bea6-4d0d-a44f-9eac38934526", "address": "fa:16:3e:c7:73:83", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.120", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642dfe35-be", "ovs_interfaceid": "642dfe35-bea6-4d0d-a44f-9eac38934526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.104347] env[68244]: DEBUG nova.scheduler.client.report [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.121060] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780091, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075749} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.121918] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 730.122705] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f382eb52-d3ee-42a0-9443-1624b197d4f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.148162] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1/8f0e60c8-7029-4dd5-b615-aa2b5d115aa1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.148902] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-631499c4-129a-4bc2-b772-72e5fb2db2ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.168444] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.168807] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Instance network_info: |[{"id": "642dfe35-bea6-4d0d-a44f-9eac38934526", "address": "fa:16:3e:c7:73:83", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.120", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642dfe35-be", "ovs_interfaceid": "642dfe35-bea6-4d0d-a44f-9eac38934526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 730.169598] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 
req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Acquired lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.169783] env[68244]: DEBUG nova.network.neutron [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Refreshing network info cache for port 642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.170875] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:73:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '642dfe35-bea6-4d0d-a44f-9eac38934526', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.178305] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.179637] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.180485] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29710a55-549e-4d0f-acd6-74eec0092179 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.200458] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521311f0-6442-3fd4-4f9e-64b8e148fa84, 'name': SearchDatastore_Task, 'duration_secs': 0.010012} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.200744] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 730.200744] env[68244]: value = "task-2780092" [ 730.200744] env[68244]: _type = "Task" [ 730.200744] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.201417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.201912] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fd4d5494-042b-457e-a826-dee4d87c0032/fd4d5494-042b-457e-a826-dee4d87c0032.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.202051] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccc148f5-2bfd-47a3-9ae0-8f615e34ae14 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.208601] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.208601] env[68244]: value = "task-2780093" [ 730.208601] env[68244]: _type = "Task" [ 730.208601] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.219187] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 730.219187] env[68244]: value = "task-2780094" [ 730.219187] env[68244]: _type = "Task" [ 730.219187] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.219416] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.226736] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780093, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.232643] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780094, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.613192] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.438s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.613192] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.615024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.904s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.615431] env[68244]: DEBUG nova.objects.instance [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lazy-loading 'resources' on Instance uuid 57504eac-0d7f-4fbe-b08c-6864713cca94 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.713288] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.727670] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780093, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.734597] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780094, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512071} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.734597] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fd4d5494-042b-457e-a826-dee4d87c0032/fd4d5494-042b-457e-a826-dee4d87c0032.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 730.735161] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 730.735654] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73c82210-5db6-4dbb-9e31-516df774feee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.744889] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 730.744889] env[68244]: value = "task-2780095" [ 730.744889] env[68244]: _type = "Task" [ 730.744889] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.756598] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780095, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.954858] env[68244]: DEBUG nova.network.neutron [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Updated VIF entry in instance network info cache for port 642dfe35-bea6-4d0d-a44f-9eac38934526. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.955120] env[68244]: DEBUG nova.network.neutron [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Updating instance_info_cache with network_info: [{"id": "642dfe35-bea6-4d0d-a44f-9eac38934526", "address": "fa:16:3e:c7:73:83", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.120", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap642dfe35-be", "ovs_interfaceid": "642dfe35-bea6-4d0d-a44f-9eac38934526", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.042466] env[68244]: DEBUG nova.compute.manager [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.043372] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73945509-825b-43fc-b3b0-8a2c2508a4f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.123031] env[68244]: DEBUG nova.compute.utils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.128361] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Allocating IP information in the background. 
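The instance_info_cache payload logged above is plain JSON. A small self-contained helper (illustrative only, not a Nova API) showing the fields most often needed when reading such an entry, with values taken from the cache update above:

```python
def summarize_vif(vif):
    subnet = vif["network"]["subnets"][0]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ip": subnet["ips"][0]["address"],
        "gateway": subnet["gateway"]["address"],
        "vif_type": vif["type"],
        "active": vif["active"],
    }


vif = {
    "id": "642dfe35-bea6-4d0d-a44f-9eac38934526",
    "address": "fa:16:3e:c7:73:83",
    "type": "ovs",
    "active": True,
    "network": {"subnets": [{
        "ips": [{"address": "192.168.233.120"}],
        "gateway": {"address": "192.168.233.1"},
    }]},
}
print(summarize_vif(vif))
```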
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.128548] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.196082] env[68244]: DEBUG nova.policy [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 731.215357] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780092, 'name': ReconfigVM_Task, 'duration_secs': 0.646958} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.218348] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1/8f0e60c8-7029-4dd5-b615-aa2b5d115aa1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.218959] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc82acc4-c946-4833-bf36-3f8c04f1c593 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.228460] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780093, 'name': CreateVM_Task, 'duration_secs': 0.61606} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.230592] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.230592] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 731.230592] env[68244]: value = "task-2780096" [ 731.230592] env[68244]: _type = "Task" [ 731.230592] env[68244]: } to complete. 
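The `Policy check for network:attach_external_network failed` entry records an oslo.policy decision against the caller's credentials. A stand-alone sketch of such a check; the `role:admin` rule string here is an assumption for illustration, Nova's real default lives in its policy definitions:

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "role:admin"))

creds = {"roles": ["reader", "member"],
         "project_id": "4a581fe596ee49c6b66f17d1ed11d120"}
target = {"project_id": creds["project_id"]}

# False for a member/reader token, matching the DEBUG line above.
print(enforcer.enforce("network:attach_external_network", target, creds))
```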
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.230592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.230807] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.230985] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.231290] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c90c334-eddb-4e13-bf1e-cd54066fe233 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.241545] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 731.241545] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527adf48-1b3e-52cf-f5ab-3b40d30e7da2" [ 731.241545] env[68244]: _type = "Task" [ 731.241545] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.244640] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780096, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.257849] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780095, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073871} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.261800] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.262555] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527adf48-1b3e-52cf-f5ab-3b40d30e7da2, 'name': SearchDatastore_Task, 'duration_secs': 0.009882} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.263747] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ced15e-aafb-4bb9-a05f-66fac1409c0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.265932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.266378] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.266652] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.266795] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.266975] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.269800] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bbb8fac-3281-401a-a4b2-c44fb04e5be8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.297038] 
env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] fd4d5494-042b-457e-a826-dee4d87c0032/fd4d5494-042b-457e-a826-dee4d87c0032.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.300873] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2071fdd-94c7-4799-892e-db847c4bd14b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.315258] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.315620] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.317241] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96557163-6672-4e7b-8ba7-32894c0314b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.323955] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 731.323955] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52030a6b-4806-ecb7-af2d-7ac13af14cb2" [ 731.323955] env[68244]: _type = "Task" [ 731.323955] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.327385] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 731.327385] env[68244]: value = "task-2780097" [ 731.327385] env[68244]: _type = "Task" [ 731.327385] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.336024] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52030a6b-4806-ecb7-af2d-7ac13af14cb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.344204] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780097, 'name': ReconfigVM_Task} progress is 6%. 
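The Acquiring/Releasing lines around SearchDatastore_Task above, followed by the CopyVirtualDisk_Task that starts once the lock is released, trace the image-cache fast path: serialize on the cached VMDK, confirm it exists, then copy it into the instance folder. A compressed sketch under the assumption that `search_datastore` and `copy_virtual_disk` are caller-supplied stand-ins for the corresponding vSphere tasks (they are not Nova helpers):

```python
from oslo_concurrency import lockutils


def disk_from_cache(image_id, instance_uuid, search_datastore, copy_virtual_disk):
    cache_vmdk = (f"[datastore2] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    dest_vmdk = f"[datastore2] {instance_uuid}/{instance_uuid}.vmdk"
    with lockutils.lock(cache_vmdk):
        # Serialize the existence check so concurrent builds of the same
        # image do not race while it is still being fetched.
        cached = search_datastore(cache_vmdk)     # SearchDatastore_Task
    if not cached:
        raise LookupError("image not cached yet")
    copy_virtual_disk(cache_vmdk, dest_vmdk)      # CopyVirtualDisk_Task
    return dest_vmdk
```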
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.457864] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e590cdc-d862-4ee0-bfae-e8472df3d107 req-9e852d9b-a729-4ae3-a685-0e2f12d808ab service nova] Releasing lock "refresh_cache-f270caad-1b02-4d5b-a435-37b77c05c4e7" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.568154] env[68244]: INFO nova.compute.manager [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] instance snapshotting [ 731.570078] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852dd9dc-23a4-455c-af04-9c98becb06a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.594329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb42847-7e07-437b-97bc-b3463924b3ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.626886] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.649133] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Successfully created port: d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.743923] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780096, 'name': Rename_Task, 'duration_secs': 0.151277} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.744871] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.745143] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9e94080-fe0a-4996-8f89-0d5b002d1bc5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.756101] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 731.756101] env[68244]: value = "task-2780098" [ 731.756101] env[68244]: _type = "Task" [ 731.756101] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.766346] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.777527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4718cfc5-4e31-490c-9354-26e4c18e39af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.786069] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6798ce-9c03-47d9-9fba-da037b0939ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.821027] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d1f93c-ad40-45cd-8c3e-56c1179fdfc1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.836592] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52030a6b-4806-ecb7-af2d-7ac13af14cb2, 'name': SearchDatastore_Task, 'duration_secs': 0.011763} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.839826] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6ef898-248d-4fb3-9a2c-d2315fb78269 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.849406] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d711930-e304-4817-b3ac-ba114380997f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.852093] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780097, 'name': ReconfigVM_Task, 'duration_secs': 0.292251} completed successfully. 
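The bursts of `PropertyCollector.RetrievePropertiesEx` invocations above are single-property reads issued through oslo.vmware, e.g. the power-state checks elsewhere in this log. A sketch of one such read, assuming `session` is an established VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference:

```python
from oslo_vmware import vim_util


def get_power_state(session, vm_ref):
    # One RetrievePropertiesEx round-trip returning a single property,
    # e.g. 'poweredOn' or 'poweredOff'.
    return session.invoke_api(vim_util, "get_object_property",
                              session.vim, vm_ref, "runtime.powerState")
```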
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.852716] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Reconfigured VM instance instance-00000016 to attach disk [datastore2] fd4d5494-042b-457e-a826-dee4d87c0032/fd4d5494-042b-457e-a826-dee4d87c0032.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.853647] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdcb01b9-f673-42b7-a7f1-3089140f45aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.871316] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 731.871316] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f294c9-23d7-3932-7d62-7e7f15cdcfae" [ 731.871316] env[68244]: _type = "Task" [ 731.871316] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.871947] env[68244]: DEBUG nova.compute.provider_tree [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.881088] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 731.881088] env[68244]: value = "task-2780099" [ 731.881088] env[68244]: _type = "Task" [ 731.881088] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.885856] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f294c9-23d7-3932-7d62-7e7f15cdcfae, 'name': SearchDatastore_Task, 'duration_secs': 0.012332} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.889407] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.889679] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f270caad-1b02-4d5b-a435-37b77c05c4e7/f270caad-1b02-4d5b-a435-37b77c05c4e7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 731.890281] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5901b6e2-001f-4189-90db-689a17d97667 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.899491] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780099, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.900909] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 731.900909] env[68244]: value = "task-2780100" [ 731.900909] env[68244]: _type = "Task" [ 731.900909] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.912144] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780100, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.112456] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 732.112852] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-08fb5038-70d3-4cde-ac0d-4fd8c08fdb24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.123844] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 732.123844] env[68244]: value = "task-2780101" [ 732.123844] env[68244]: _type = "Task" [ 732.123844] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.143832] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780101, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.268409] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.376937] env[68244]: DEBUG nova.scheduler.client.report [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.397940] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780099, 'name': Rename_Task, 'duration_secs': 0.164603} completed successfully. 
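The inventory dict in the `Inventory has not changed for provider ...` entry above is enough to recompute the provider's allocatable capacity, since Placement derives it as (total - reserved) * allocation_ratio per resource class:

```python
# Figures copied from the inventory data logged above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```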
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.397940] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 732.397940] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f587be8d-d5c7-41d2-a65c-4e3812bbe3a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.408198] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 732.408198] env[68244]: value = "task-2780102" [ 732.408198] env[68244]: _type = "Task" [ 732.408198] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.417492] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780100, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.423816] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780102, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.639968] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780101, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.647146] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.672834] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.673098] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.673257] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.673437] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.673583] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.673725] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.673932] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.674095] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.674263] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 
tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.674451] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.674638] env[68244]: DEBUG nova.virt.hardware [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.675626] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2ea271-107b-438d-8030-58b8149ee091 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.684336] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4215717-39ec-4b62-8d10-84368a02edfc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.768700] env[68244]: DEBUG oslo_vmware.api [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780098, 'name': PowerOnVM_Task, 'duration_secs': 0.609694} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.769077] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.769297] env[68244]: INFO nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Took 8.59 seconds to spawn the instance on the hypervisor. 
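The hardware debug lines above walk from the m1.nano flavor (1 vCPU, no flavor or image topology limits) to a single candidate topology of 1 socket, 1 core, 1 thread. A toy re-derivation of that result (not Nova's actual topology code, just the same counting argument): every (sockets, cores, threads) triple whose product equals the vCPU count is a candidate, and for one vCPU only (1, 1, 1) qualifies.

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)


print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"
```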
[ 732.769481] env[68244]: DEBUG nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.770518] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37766370-387b-40cc-946c-0428a63f865c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.883560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.887685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.546s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.892672] env[68244]: INFO nova.compute.claims [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.919909] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.920269] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f270caad-1b02-4d5b-a435-37b77c05c4e7/f270caad-1b02-4d5b-a435-37b77c05c4e7.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 732.920527] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.921304] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae6c0cc8-32c2-4ae2-a216-0eca5d156130 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.925377] env[68244]: INFO nova.scheduler.client.report [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted allocations for instance 57504eac-0d7f-4fbe-b08c-6864713cca94 [ 732.932744] env[68244]: DEBUG oslo_vmware.api [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780102, 'name': PowerOnVM_Task, 'duration_secs': 0.521392} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.936712] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.936933] env[68244]: INFO nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Took 14.88 seconds to spawn the instance on the hypervisor. [ 732.937124] env[68244]: DEBUG nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 732.937576] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 732.937576] env[68244]: value = "task-2780103" [ 732.937576] env[68244]: _type = "Task" [ 732.937576] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.938586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbaccd2-01f6-4579-9f98-289b12b1cc41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.952134] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780103, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.134901] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780101, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.296334] env[68244]: INFO nova.compute.manager [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Took 43.52 seconds to build instance. [ 733.441523] env[68244]: DEBUG oslo_concurrency.lockutils [None req-caf5dadb-ea79-4977-91ab-3e60f3f968f3 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "57504eac-0d7f-4fbe-b08c-6864713cca94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.678s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.452470] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075548} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.452779] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.453456] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f96707e-d646-47a3-9bff-a5a40584a818 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.481270] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] f270caad-1b02-4d5b-a435-37b77c05c4e7/f270caad-1b02-4d5b-a435-37b77c05c4e7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.483456] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbb3c392-9fad-4623-8fc2-b56ef0db860a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.499775] env[68244]: INFO nova.compute.manager [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Took 52.81 seconds to build instance. [ 733.507593] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 733.507593] env[68244]: value = "task-2780104" [ 733.507593] env[68244]: _type = "Task" [ 733.507593] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.516711] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.600053] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Successfully updated port: d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.638687] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780101, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.685723] env[68244]: DEBUG nova.compute.manager [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Received event network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 733.686821] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] Acquiring lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.687082] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] Lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.687256] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] Lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.689541] env[68244]: DEBUG nova.compute.manager [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] No waiting events found dispatching network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 733.689541] env[68244]: WARNING nova.compute.manager [req-bb313e53-9cdc-4ec5-86a9-d9c595b68140 req-52775ef7-fd85-44b4-928c-576b50e4d9de service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Received unexpected event network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1 for instance with vm_state building and task_state spawning. 
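The WARNING above (`Received unexpected event network-vif-plugged-... for instance with vm_state building`) is the benign case where Neutron reports the VIF plug before the compute manager has registered a waiter for it, so there is no event to pop. A toy model of that ordering with a plain dict of threading.Events (an illustration, not Nova's InstanceEvents class):

```python
import threading

waiters = {}  # (instance_uuid, event_name) -> threading.Event


def prepare_for_event(instance_uuid, name):
    waiters[(instance_uuid, name)] = threading.Event()


def pop_instance_event(instance_uuid, name):
    ev = waiters.pop((instance_uuid, name), None)
    if ev is None:
        print(f"WARNING: unexpected event {name} for {instance_uuid}")
        return
    ev.set()  # wake whoever is blocked waiting for the plug


# Plug notification arrives before anyone is waiting -> "unexpected event",
# the same ordering as the log lines above.
pop_instance_event("2aacd21f-d664-4267-8331-d3862f43d35b",
                   "network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1")

# Normal ordering: the waiter is registered first, then the event lands.
prepare_for_event("2aacd21f-d664-4267-8331-d3862f43d35b",
                  "network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1")
pop_instance_event("2aacd21f-d664-4267-8331-d3862f43d35b",
                   "network-vif-plugged-d6df42d7-2b90-4e9a-a9cc-15adae4310a1")
```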
[ 733.801504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dcf3821b-86d5-4d35-9fcb-155990fa11cb tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.623s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.001284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-efd75691-f956-40b1-a269-20940feee4be tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.405s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.022155] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780104, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.108229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.108385] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.108533] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.140624] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780101, 'name': CreateSnapshot_Task, 'duration_secs': 1.621269} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.140890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 734.141665] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b6147d-8e4e-4bc7-ba95-57a1152e63a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.305745] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 734.487944] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.487944] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.510995] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 734.517073] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e8bd93-8fb1-4d08-8382-1f13d83e243f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.530141] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780104, 'name': ReconfigVM_Task, 'duration_secs': 0.574916} completed successfully. 
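The `Running periodic task ComputeManager._check_instance_build_time` / `_sync_scheduler_instance_info` lines come from oslo.service's periodic task machinery. A minimal sketch of that mechanism (the 60-second spacing and the task body are placeholders, not Nova's configuration):

```python
from oslo_config import cfg
from oslo_service import periodic_task


class Manager(periodic_task.PeriodicTasks):
    def __init__(self, conf):
        super().__init__(conf)

    # run_immediately=True so a single run_periodic_tasks() call below
    # actually fires the task; Nova drives this from a recurring timer.
    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _check_instance_build_time(self, context):
        print("checking instance build times")


mgr = Manager(cfg.CONF)
mgr.run_periodic_tasks(context=None)
```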
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.530840] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Reconfigured VM instance instance-00000018 to attach disk [datastore2] f270caad-1b02-4d5b-a435-37b77c05c4e7/f270caad-1b02-4d5b-a435-37b77c05c4e7.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.531589] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57cc6ec5-53a2-4e21-a91e-a036024674bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.535028] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c11ffcd-f202-4f04-887d-45541a29d9f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.538681] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.538904] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.539112] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.539637] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.539830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.572304] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 
tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 734.572304] env[68244]: value = "task-2780105" [ 734.572304] env[68244]: _type = "Task" [ 734.572304] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.573084] env[68244]: INFO nova.compute.manager [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Terminating instance [ 734.575213] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476f4d12-efc1-4422-97ac-dede7366a82d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.592147] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe052ab-f74f-4c49-8f1f-ddb8cefa90b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.597044] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780105, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.608190] env[68244]: DEBUG nova.compute.provider_tree [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.663805] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 734.664148] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ea5ec5d0-4c31-4d09-85fa-9f5349ae4e4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.668276] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.676918] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 734.676918] env[68244]: value = "task-2780106" [ 734.676918] env[68244]: _type = "Task" [ 734.676918] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.686729] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.836104] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.011814] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.012111] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.012348] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.012537] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.012779] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.013019] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.013215] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 735.013410] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.041996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.085818] env[68244]: DEBUG nova.compute.manager [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 735.085818] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 735.085818] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2458f816-0ec0-434c-9737-00fb6346cdce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.093668] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780105, 'name': Rename_Task, 'duration_secs': 0.156126} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.095962] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 735.095962] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5288a3c7-5d39-422a-8e89-f34fb656d874 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.099500] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 735.099872] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dffa6bda-fc6a-46bd-8fae-68ec15f656b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.102982] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 735.102982] env[68244]: value = "task-2780107" [ 735.102982] env[68244]: _type = "Task" [ 735.102982] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.111029] env[68244]: DEBUG nova.scheduler.client.report [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.114897] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 735.114897] env[68244]: value = "task-2780108" [ 735.114897] env[68244]: _type = "Task" [ 735.114897] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.125854] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780107, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.133963] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.193095] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.201967] env[68244]: DEBUG nova.network.neutron [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Updating instance_info_cache with network_info: [{"id": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "address": "fa:16:3e:d5:0a:6a", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6df42d7-2b", "ovs_interfaceid": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.519253] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.614419] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780107, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.616270] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.617071] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.619551] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.074s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.620964] env[68244]: INFO nova.compute.claims [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.631640] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780108, 'name': PowerOffVM_Task, 'duration_secs': 0.243428} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.631719] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 735.631888] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 735.632279] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62438c39-b06e-4e1f-b64d-60168bbd5368 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.688926] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.705057] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.705399] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Instance network_info: |[{"id": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "address": "fa:16:3e:d5:0a:6a", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6df42d7-2b", "ovs_interfaceid": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 735.705815] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:0a:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6df42d7-2b90-4e9a-a9cc-15adae4310a1', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.714230] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating folder: Project (4a581fe596ee49c6b66f17d1ed11d120). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.714749] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24060f88-4f70-44d0-9b77-fd0c30f05ad5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.726516] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created folder: Project (4a581fe596ee49c6b66f17d1ed11d120) in parent group-v558876. 
[ 735.728510] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating folder: Instances. Parent ref: group-v558960. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.728510] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6259f82c-d831-430f-8fe7-ccba5439e66b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.737427] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created folder: Instances in parent group-v558960. [ 735.737890] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.738014] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.738293] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a83fad54-6907-4fae-86fd-a832be827c5d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.769919] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.769919] env[68244]: value = "task-2780112" [ 735.769919] env[68244]: _type = "Task" [ 735.769919] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.778583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 735.778805] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 735.779087] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleting the datastore file [datastore2] fd4d5494-042b-457e-a826-dee4d87c0032 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.779684] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f302ce1-ee78-4c51-b8e1-18efa73ea683 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.784866] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.786196] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 735.786196] env[68244]: value = "task-2780113" [ 735.786196] env[68244]: _type = "Task" [ 735.786196] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.795135] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.126016] env[68244]: DEBUG oslo_vmware.api [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780107, 'name': PowerOnVM_Task, 'duration_secs': 0.763383} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.126016] env[68244]: DEBUG nova.compute.utils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 736.131979] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 736.139033] env[68244]: INFO nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Took 9.23 seconds to spawn the instance on the hypervisor. [ 736.139033] env[68244]: DEBUG nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 736.139033] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 736.139033] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 736.141477] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb2a8c5-e651-481b-88e8-480438acf278 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.195168] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.283492] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.283842] env[68244]: DEBUG nova.policy [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '583aa5a96f694eb0af400771180c0e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef5fff09e8174d0c8c09e5efc2164ab2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 736.294912] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.486621] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.486621] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.638558] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.663026] env[68244]: INFO nova.compute.manager [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Took 42.84 seconds to build instance. [ 736.690787] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.782428] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.794719] env[68244]: DEBUG oslo_vmware.api [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519588} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.797569] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 736.797569] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 736.797692] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 736.798125] env[68244]: INFO nova.compute.manager [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Took 1.71 seconds to destroy the instance on the hypervisor. [ 736.798125] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 736.798446] env[68244]: DEBUG nova.compute.manager [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 736.798546] env[68244]: DEBUG nova.network.neutron [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.864711] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Successfully created port: 01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.163510] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b50b2c8-5120-42d0-abd6-1473323432d0 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.348s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.184237] env[68244]: DEBUG nova.compute.manager [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Received event network-changed-d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 737.184481] env[68244]: DEBUG nova.compute.manager [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Refreshing instance network info cache due to event network-changed-d6df42d7-2b90-4e9a-a9cc-15adae4310a1. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 737.184907] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Acquiring lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.184907] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Acquired lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.185173] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Refreshing network info cache for port d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.201998] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.206839] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cc3759-4d1e-43ca-8981-b3476b1c577e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.215761] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1e2e91-5f7f-4ca1-ba77-d3b569d2646a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.249038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1ff1d5-ee68-4a7d-97af-a536e3ea9ebc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.256960] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633dee82-0667-4ffd-b3c2-101a8415a72a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.273155] env[68244]: DEBUG nova.compute.provider_tree [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.282935] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.629436] env[68244]: DEBUG nova.compute.manager [req-87a16f27-f4b7-42e7-9a9b-1a35183bf04b req-07a51bbe-ecc3-4824-8202-74808a4fcaf0 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-deleted-289c59b7-4aa5-4a58-a62b-6b55b73f8255 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 737.629726] env[68244]: INFO nova.compute.manager [req-87a16f27-f4b7-42e7-9a9b-1a35183bf04b req-07a51bbe-ecc3-4824-8202-74808a4fcaf0 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Neutron deleted interface 289c59b7-4aa5-4a58-a62b-6b55b73f8255; detaching it from the instance and deleting it from the info cache [ 737.630143] env[68244]: DEBUG nova.network.neutron [req-87a16f27-f4b7-42e7-9a9b-1a35183bf04b req-07a51bbe-ecc3-4824-8202-74808a4fcaf0 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [{"id": "34e93d11-e4ad-463d-9400-d65db1e5a394", "address": "fa:16:3e:5d:20:ab", "network": {"id": "8f99087f-d6b6-4722-b1c3-9c2df51ef236", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-803038205", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "877ed63d-906e-4bd5-a1fc-7e82d172d41e", "external-id": "nsx-vlan-transportzone-642", "segmentation_id": 642, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e93d11-e4", "ovs_interfaceid": "34e93d11-e4ad-463d-9400-d65db1e5a394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "address": "fa:16:3e:77:54:ab", "network": {"id": "2aa8a3c1-66cd-4405-92ca-932c79a3ab37", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1004136721", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf80f51a3-77", "ovs_interfaceid": "f80f51a3-7747-4204-ae15-fbe07a3765ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.660526] env[68244]: DEBUG nova.compute.manager [None 
req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.666165] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.682864] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.682864] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.682864] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.683125] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.683348] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.683544] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 737.683796] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.684030] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.684265] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.684529] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.684775] env[68244]: DEBUG nova.virt.hardware [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.685718] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf00401-e898-40bd-87c4-3c83167d488a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.699135] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.702928] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2addc354-37f1-4d05-8447-86721914cd33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.773331] env[68244]: DEBUG nova.scheduler.client.report [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.789106] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.986980] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Updated VIF entry in instance network info cache for port d6df42d7-2b90-4e9a-a9cc-15adae4310a1. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 737.986980] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Updating instance_info_cache with network_info: [{"id": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "address": "fa:16:3e:d5:0a:6a", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6df42d7-2b", "ovs_interfaceid": "d6df42d7-2b90-4e9a-a9cc-15adae4310a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.986980] env[68244]: DEBUG nova.network.neutron [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.133933] env[68244]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ebe9c7f-cd41-4f54-a5b5-57b617270d0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.143285] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cc6459-df33-491a-995d-84271d96aea7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.176354] env[68244]: DEBUG nova.compute.manager [req-87a16f27-f4b7-42e7-9a9b-1a35183bf04b req-07a51bbe-ecc3-4824-8202-74808a4fcaf0 service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Detach interface failed, port_id=289c59b7-4aa5-4a58-a62b-6b55b73f8255, reason: Instance fd4d5494-042b-457e-a826-dee4d87c0032 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 738.193988] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.194919] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.282157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.282686] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 738.285325] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780112, 'name': CreateVM_Task, 'duration_secs': 2.354529} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.287193] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 29.998s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.287193] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.287832] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.289232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.289232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 738.289232] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed24043-9aea-4952-a5a7-6ab8f0b00b9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.293504] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 738.293504] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52872730-1f37-fc41-046e-2924e22dbaef" [ 738.293504] env[68244]: _type = "Task" [ 738.293504] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.301447] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52872730-1f37-fc41-046e-2924e22dbaef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.402742] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Releasing lock "refresh_cache-2aacd21f-d664-4267-8331-d3862f43d35b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.402742] env[68244]: DEBUG nova.compute.manager [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.402742] env[68244]: DEBUG nova.compute.manager [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing instance network info cache due to event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 738.402742] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Acquiring lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.402742] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Acquired lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.402742] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.442107] env[68244]: INFO nova.compute.manager [-] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Took 1.64 seconds to deallocate network for instance. [ 738.697594] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.786302] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Successfully updated port: 01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.789808] env[68244]: DEBUG nova.compute.utils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.791351] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 738.791598] env[68244]: DEBUG nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.796949] env[68244]: INFO nova.compute.claims [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.814917] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52872730-1f37-fc41-046e-2924e22dbaef, 'name': SearchDatastore_Task, 'duration_secs': 0.024588} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.815284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.815854] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.815854] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.815963] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.816114] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.816383] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f02341a0-8bbb-4f40-b9c6-a12815501d15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.825885] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.826076] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.826862] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98793f89-9c9c-490e-b027-e12d213ee547 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.833675] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 738.833675] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5214a563-76c4-5584-9523-e76e02ba7bb2" [ 738.833675] env[68244]: _type = "Task" [ 738.833675] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.841842] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5214a563-76c4-5584-9523-e76e02ba7bb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.899150] env[68244]: DEBUG nova.policy [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cddbad2016a4b98b5c05082a13f59f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c0473d09c04fb8a80d27a43c07bef4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.946907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.094494] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.094865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.095236] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238
tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.095499] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.095618] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.098440] env[68244]: INFO nova.compute.manager [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Terminating instance [ 739.197400] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.220680] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updated VIF entry in instance network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303.
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.221094] env[68244]: DEBUG nova.network.neutron [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [{"id": "817af294-3a0f-4ead-9a86-ed1f635dc303", "address": "fa:16:3e:be:f2:fe", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap817af294-3a", "ovs_interfaceid": "817af294-3a0f-4ead-9a86-ed1f635dc303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.284786] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 739.284980] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing instance network info cache due to event network-changed-817af294-3a0f-4ead-9a86-ed1f635dc303. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 739.285187] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Acquiring lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.294918] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.295068] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquired lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.295220] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.302085] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 739.310023] env[68244]: INFO nova.compute.resource_tracker [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating resource usage from migration 708441f4-9a09-4c99-bfc8-42d73de28a7f [ 739.348749] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5214a563-76c4-5584-9523-e76e02ba7bb2, 'name': SearchDatastore_Task, 'duration_secs': 0.013599} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.355985] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03902dd3-1fe4-466b-a546-516c71929032 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.362124] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 739.362124] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ba3e50-bd98-8a39-7f12-7b5d9245d829" [ 739.362124] env[68244]: _type = "Task" [ 739.362124] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.374761] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ba3e50-bd98-8a39-7f12-7b5d9245d829, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.415216] env[68244]: DEBUG nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Successfully created port: 4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.605516] env[68244]: DEBUG nova.compute.manager [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.608450] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.608705] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6278649-4f18-4fba-b183-d20fe6b500bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.617554] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.620196] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48aafd45-463c-4503-9100-98eb503aa319 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.626679] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 739.626679] env[68244]: value = "task-2780114" [ 739.626679] env[68244]: _type = "Task" [ 739.626679] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.635144] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780114, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.699218] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780106, 'name': CloneVM_Task, 'duration_secs': 4.698711} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.699466] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Created linked-clone VM from snapshot [ 739.700236] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc2cc4d-f690-4cd4-ac93-4674dc07693c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.718414] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Uploading image 0428e551-b5ef-4ca7-9b75-5cb0cdbb76f1 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 739.723025] env[68244]: DEBUG nova.compute.manager [req-c9385c0a-a462-4950-91f3-7f75400ff95e req-f26078cd-04c4-444b-b167-b24d8ece276c service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-deleted-f80f51a3-7747-4204-ae15-fbe07a3765ea {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 739.723244] env[68244]: DEBUG nova.compute.manager [req-c9385c0a-a462-4950-91f3-7f75400ff95e req-f26078cd-04c4-444b-b167-b24d8ece276c service nova] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Received event network-vif-deleted-34e93d11-e4ad-463d-9400-d65db1e5a394 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 739.726621] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ea91b0e-0f61-4e1b-af61-2392bcdb9d4b req-ea883078-484a-4df4-9ad7-d7888a7942d5 service nova] Releasing lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.727917] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Acquired lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.727917] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Refreshing network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.743727] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 739.743727] env[68244]: value = "vm-558959" [ 739.743727] env[68244]: _type = "VirtualMachine" 
[ 739.743727] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 739.746125] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b5e99e67-5de0-43ec-a3cb-55a236298f15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.752116] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lease: (returnval){ [ 739.752116] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f52662-2ec6-c2ca-0d82-54dba0e7c6ae" [ 739.752116] env[68244]: _type = "HttpNfcLease" [ 739.752116] env[68244]: } obtained for exporting VM: (result){ [ 739.752116] env[68244]: value = "vm-558959" [ 739.752116] env[68244]: _type = "VirtualMachine" [ 739.752116] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 739.752528] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the lease: (returnval){ [ 739.752528] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f52662-2ec6-c2ca-0d82-54dba0e7c6ae" [ 739.752528] env[68244]: _type = "HttpNfcLease" [ 739.752528] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 739.762507] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 739.762507] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f52662-2ec6-c2ca-0d82-54dba0e7c6ae" [ 739.762507] env[68244]: _type = "HttpNfcLease" [ 739.762507] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 739.828098] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.874585] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ba3e50-bd98-8a39-7f12-7b5d9245d829, 'name': SearchDatastore_Task, 'duration_secs': 0.033287} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.875203] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.875635] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2aacd21f-d664-4267-8331-d3862f43d35b/2aacd21f-d664-4267-8331-d3862f43d35b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.875724] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02404363-2b9b-41eb-a5d1-19c6c84a9eb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.882963] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 739.882963] env[68244]: value = "task-2780116" [ 739.882963] env[68244]: _type = "Task" [ 739.882963] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.895916] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780116, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.986060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8de3eee-828d-4a85-9563-af0502329235 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.996855] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b61e0e9-377e-44c8-bec8-d224c55cf462 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.031199] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1765a044-e177-4641-baeb-72b0885019d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.040044] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba9540f-1cd0-4dcc-9188-38d9b33c1d5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.056440] env[68244]: DEBUG nova.compute.provider_tree [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.059101] env[68244]: DEBUG nova.network.neutron [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Updating instance_info_cache with network_info: [{"id": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "address": "fa:16:3e:a2:26:1a", "network": {"id": "684a2004-5112-4bec-8575-c61b6101134d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1965396615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef5fff09e8174d0c8c09e5efc2164ab2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01eeb7b1-e8", "ovs_interfaceid": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.137323] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780114, 'name': PowerOffVM_Task, 'duration_secs': 0.215451} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.137612] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.137802] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.138185] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa8d0e10-406e-40d5-a81e-00fff0a6d3df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.261474] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 740.261474] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f52662-2ec6-c2ca-0d82-54dba0e7c6ae" [ 740.261474] env[68244]: _type = "HttpNfcLease" [ 740.261474] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 740.261806] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 740.261806] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f52662-2ec6-c2ca-0d82-54dba0e7c6ae" [ 740.261806] env[68244]: _type = "HttpNfcLease" [ 740.261806] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 740.262643] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09671d0-2e7a-44a2-9b18-4e937646d7f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.272684] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 740.273429] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 740.331914] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 740.364665] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=<?>,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:18:49Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.365027] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.365120] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.365365] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.365620] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.365686] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.365903] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.366128] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:475}} [ 740.366297] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.366596] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.366870] env[68244]: DEBUG nova.virt.hardware [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.367820] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690bb823-f0ea-4c7b-b8a4-30bf79b932ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.376652] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd91a398-f63f-4f9b-abd0-f480cd6cafbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.403233] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780116, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.552800] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0f739b67-9746-457b-98d4-9248f231efb3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.561087] env[68244]: DEBUG nova.scheduler.client.report [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.573023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Releasing lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.573023] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Instance network_info: |[{"id": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "address": "fa:16:3e:a2:26:1a", "network": {"id": "684a2004-5112-4bec-8575-c61b6101134d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1965396615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef5fff09e8174d0c8c09e5efc2164ab2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01eeb7b1-e8", "ovs_interfaceid": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 740.573023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:26:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01eeb7b1-e851-425f-933e-fdd80ac8564b', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.582540] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Creating folder: Project (ef5fff09e8174d0c8c09e5efc2164ab2). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.586929] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c73fedb-adf1-48d0-879e-5e5af98f822a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.603978] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Created folder: Project (ef5fff09e8174d0c8c09e5efc2164ab2) in parent group-v558876. [ 740.604316] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Creating folder: Instances. Parent ref: group-v558963. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.605306] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updated VIF entry in instance network info cache for port 817af294-3a0f-4ead-9a86-ed1f635dc303. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 740.605698] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [{"id": "817af294-3a0f-4ead-9a86-ed1f635dc303", "address": "fa:16:3e:be:f2:fe", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap817af294-3a", "ovs_interfaceid": "817af294-3a0f-4ead-9a86-ed1f635dc303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.608519] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f337a889-87de-4867-a8ba-721e110f0d40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.620278] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Created folder: Instances in parent group-v558963. [ 740.620711] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.621992] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbc08614-926e-4209-abec-4808f223943a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.622644] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed7b5665-da22-4a2f-b075-c4c41be1d69a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.650536] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.650536] env[68244]: value = "task-2780120" [ 740.650536] env[68244]: _type = "Task" [ 740.650536] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.660352] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780120, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.904665] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670448} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.905752] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2aacd21f-d664-4267-8331-d3862f43d35b/2aacd21f-d664-4267-8331-d3862f43d35b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.906267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.906703] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a878e02e-6f2b-433b-bdec-b11a858ac495 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.914949] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 740.914949] env[68244]: value = "task-2780121" [ 740.914949] env[68244]: _type = "Task" [ 740.914949] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.923984] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780121, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.074246] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.788s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.074531] env[68244]: INFO nova.compute.manager [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Migrating [ 741.076245] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.076555] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.078429] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.078429] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.079794] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Deleting the datastore file [datastore2] f270caad-1b02-4d5b-a435-37b77c05c4e7 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.080397] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.138s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.082632] env[68244]: INFO nova.compute.claims [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.085937] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fb9ac73-c944-4085-8e08-fc07965a165d {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.094406] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for the task: (returnval){ [ 741.094406] env[68244]: value = "task-2780122" [ 741.094406] env[68244]: _type = "Task" [ 741.094406] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.103676] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.111503] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Releasing lock "refresh_cache-8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.111638] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received event network-vif-plugged-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 741.112976] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Acquiring lock "bbc08614-926e-4209-abec-4808f223943a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.112976] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Lock "bbc08614-926e-4209-abec-4808f223943a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.112976] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Lock "bbc08614-926e-4209-abec-4808f223943a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.112976] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] No waiting events found dispatching network-vif-plugged-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 741.113205] env[68244]: WARNING nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received unexpected event network-vif-plugged-01eeb7b1-e851-425f-933e-fdd80ac8564b for instance with vm_state building and task_state spawning. 
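The "Acquiring lock ... by ...", "acquired ... :: waited 0.000s" and "released ... :: held 0.000s" records above are emitted by oslo.concurrency's lockutils helpers wrapping the compute-manager event dispatch and the Neutron cache refresh. A minimal sketch of the two usage forms behind those records, assuming only the public oslo_concurrency API; the lock names and the function below are illustrative placeholders, not values taken from this log.

    from oslo_concurrency import lockutils

    # Decorator form: calls sharing the lock name are serialized within the
    # process, producing the 'acquired ... :: waited Ns' / 'released ... :: held Ns'
    # style of DEBUG record seen above.
    @lockutils.synchronized('example-instance-events')
    def pop_instance_event():
        # Placeholder body standing in for the event-dispatch critical section.
        return None

    # Context-manager form, which logs the 'Acquiring lock' / 'Acquired lock' /
    # 'Releasing lock' records seen around the refresh_cache-* sections.
    with lockutils.lock('refresh_cache-example-instance'):
        pop_instance_event()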
[ 741.113343] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 741.113528] env[68244]: DEBUG nova.compute.manager [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing instance network info cache due to event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 741.113841] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Acquiring lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.114121] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Acquired lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.114822] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 741.161046] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780120, 'name': CreateVM_Task, 'duration_secs': 0.386728} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.161046] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbc08614-926e-4209-abec-4808f223943a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.161680] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.161888] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.162316] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.162823] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05bbe5f4-f62f-4e76-916d-36d47a9741b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.168469] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 741.168469] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ded90-ee9c-5d04-7472-83f1e6783796" [ 741.168469] env[68244]: _type = "Task" [ 741.168469] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.177242] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ded90-ee9c-5d04-7472-83f1e6783796, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.424197] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072167} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.424547] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.425449] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432a1cbb-83a3-4d1b-9914-da1d5ea7f808 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.448759] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 2aacd21f-d664-4267-8331-d3862f43d35b/2aacd21f-d664-4267-8331-d3862f43d35b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.449226] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b079722-eda2-47a5-b999-2fc2808cab48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.469752] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 741.469752] env[68244]: value = "task-2780123" [ 741.469752] env[68244]: _type = "Task" [ 741.469752] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.479046] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780123, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.581044] env[68244]: INFO nova.compute.rpcapi [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 741.581667] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.614695] env[68244]: DEBUG oslo_vmware.api [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Task: {'id': task-2780122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27602} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.615121] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.615624] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.615862] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.616141] env[68244]: INFO nova.compute.manager [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Took 2.01 seconds to destroy the instance on the hypervisor. [ 741.616559] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.616877] env[68244]: DEBUG nova.compute.manager [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.617032] env[68244]: DEBUG nova.network.neutron [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.681135] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ded90-ee9c-5d04-7472-83f1e6783796, 'name': SearchDatastore_Task, 'duration_secs': 0.011388} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.681549] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.681831] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.682118] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.682347] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.682695] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.682946] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2258129-9051-4a1b-9875-37d30b6ee027 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.693401] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.693473] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.698018] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd233c9a-191f-4923-98fa-f9a2e2a06ae6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.700963] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 741.700963] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f3dc9-7ea8-90e8-0e7a-a7b244f75f2d" [ 741.700963] env[68244]: _type = "Task" [ 741.700963] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.709839] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f3dc9-7ea8-90e8-0e7a-a7b244f75f2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.711481] env[68244]: DEBUG nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Successfully updated port: 4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.843174] env[68244]: DEBUG nova.compute.manager [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Received event network-vif-plugged-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 741.845897] env[68244]: DEBUG oslo_concurrency.lockutils [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] Acquiring lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.846229] env[68244]: DEBUG oslo_concurrency.lockutils [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.846598] env[68244]: DEBUG oslo_concurrency.lockutils [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.846811] env[68244]: DEBUG nova.compute.manager [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] [instance: 
e2099d6d-5ab7-4a3e-8034-a3b4fc422749] No waiting events found dispatching network-vif-plugged-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 741.847384] env[68244]: WARNING nova.compute.manager [req-a61ddca9-49b4-49a6-9aaa-4e4dda277747 req-99e56332-913a-4774-a5e5-b2a75b0d7000 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Received unexpected event network-vif-plugged-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 for instance with vm_state building and task_state spawning. [ 741.988020] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780123, 'name': ReconfigVM_Task, 'duration_secs': 0.293116} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.988840] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 2aacd21f-d664-4267-8331-d3862f43d35b/2aacd21f-d664-4267-8331-d3862f43d35b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.989991] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd744323-64c5-4bbe-a0ca-f1c0f7785567 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.001755] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 742.001755] env[68244]: value = "task-2780124" [ 742.001755] env[68244]: _type = "Task" [ 742.001755] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.015021] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780124, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.109362] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updated VIF entry in instance network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.109362] env[68244]: DEBUG nova.network.neutron [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updating instance_info_cache with network_info: [{"id": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "address": "fa:16:3e:a2:26:1a", "network": {"id": "684a2004-5112-4bec-8575-c61b6101134d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1965396615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef5fff09e8174d0c8c09e5efc2164ab2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01eeb7b1-e8", "ovs_interfaceid": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.109963] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.111336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.111587] env[68244]: DEBUG nova.network.neutron [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.214835] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.215082] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.215304] env[68244]: DEBUG 
nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.216995] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f3dc9-7ea8-90e8-0e7a-a7b244f75f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010543} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.221143] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecd8660a-5059-41ab-9d2c-5d73958c2369 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.228291] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 742.228291] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5250353f-9cb2-ae0d-b780-6e87f35e2b97" [ 742.228291] env[68244]: _type = "Task" [ 742.228291] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.238508] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5250353f-9cb2-ae0d-b780-6e87f35e2b97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.377024] env[68244]: DEBUG nova.compute.manager [req-b467895d-b249-49ed-b585-b09b033fd30e req-1fa2f8f2-7729-436b-8061-71df90dd30d5 service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Received event network-vif-deleted-642dfe35-bea6-4d0d-a44f-9eac38934526 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 742.377024] env[68244]: INFO nova.compute.manager [req-b467895d-b249-49ed-b585-b09b033fd30e req-1fa2f8f2-7729-436b-8061-71df90dd30d5 service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Neutron deleted interface 642dfe35-bea6-4d0d-a44f-9eac38934526; detaching it from the instance and deleting it from the info cache [ 742.377024] env[68244]: DEBUG nova.network.neutron [req-b467895d-b249-49ed-b585-b09b033fd30e req-1fa2f8f2-7729-436b-8061-71df90dd30d5 service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.495520] env[68244]: DEBUG nova.network.neutron [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.514054] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780124, 'name': Rename_Task, 'duration_secs': 0.145474} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.518399] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.519259] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcc50eef-afe1-4d5b-b8d9-c369e720faf7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.532801] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 742.532801] env[68244]: value = "task-2780125" [ 742.532801] env[68244]: _type = "Task" [ 742.532801] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.543167] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780125, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.614158] env[68244]: DEBUG oslo_concurrency.lockutils [req-958a346e-0b9a-44ba-a831-b6105758e43c req-bca7bc54-74ee-4eec-ac01-a1ddc9a0a83e service nova] Releasing lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.715100] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782f7e3c-b595-4ee8-b8e2-27db07e83ca6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.726993] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6187949-e65b-484c-939f-60c2933fea64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.740054] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5250353f-9cb2-ae0d-b780-6e87f35e2b97, 'name': SearchDatastore_Task, 'duration_secs': 0.010439} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.765800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.766112] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] bbc08614-926e-4209-abec-4808f223943a/bbc08614-926e-4209-abec-4808f223943a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.767310] env[68244]: DEBUG nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.769351] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81b35c54-3a3e-4efc-8b35-b80b879e3a50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.772396] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1886176-a63a-481a-ae04-32bc0f94eaaf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.782295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e86382e-b52c-405c-b77e-1d093fa5d592 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.786296] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 742.786296] env[68244]: value = "task-2780126" [ 742.786296] env[68244]: _type = "Task" [ 742.786296] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.798464] env[68244]: DEBUG nova.compute.provider_tree [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.807442] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.878390] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1d11e76-f79c-4c97-890c-907d142550c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.889045] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca11b4d2-36c0-4d43-81ce-395b4ee0d61e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.923706] env[68244]: DEBUG nova.compute.manager [req-b467895d-b249-49ed-b585-b09b033fd30e req-1fa2f8f2-7729-436b-8061-71df90dd30d5 service nova] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Detach interface failed, port_id=642dfe35-bea6-4d0d-a44f-9eac38934526, reason: Instance f270caad-1b02-4d5b-a435-37b77c05c4e7 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 742.951326] env[68244]: DEBUG nova.network.neutron [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "address": "fa:16:3e:78:36:a6", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb55cce-a3", "ovs_interfaceid": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.999882] env[68244]: INFO nova.compute.manager [-] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Took 1.38 seconds to deallocate network for instance. [ 743.043495] env[68244]: DEBUG oslo_vmware.api [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780125, 'name': PowerOnVM_Task, 'duration_secs': 0.496738} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.043782] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.043985] env[68244]: INFO nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Took 10.40 seconds to spawn the instance on the hypervisor. 
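The "Updating instance_info_cache with network_info" records above and below dump the Neutron view of each port as a JSON list of VIF dictionaries. A minimal, self-contained sketch of reading one such entry offline; the literal below is trimmed to a few fields of the e2099d6d / 4fb55cce entry above, with all other fields omitted, and is only meant to show the shape of the data.

    import json

    # Trimmed sample in the same shape as the instance_info_cache dumps above;
    # values copied from the 4fb55cce-... entry, remaining fields omitted.
    raw = '''
    [{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26",
      "address": "fa:16:3e:78:36:a6",
      "devname": "tap4fb55cce-a3",
      "type": "ovs",
      "network": {"label": "tempest-ServerRescueNegativeTestJSON-2053122981-network",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.3", "type": "fixed"}]}]}}]
    '''

    for vif in json.loads(raw):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        # e.g. 4fb55cce-... fa:16:3e:78:36:a6 tap4fb55cce-a3 -> 192.168.128.3
        print(vif["id"], vif["address"], vif["devname"], "->", ", ".join(ips))

The later "Instance network_info: |[...]|" and "Instance VIF info" records for the same instance show this structure being consumed when the VM is built.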
[ 743.044251] env[68244]: DEBUG nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.045637] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f254706e-d1a9-4222-9f11-1c8e014a2156 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.206057] env[68244]: DEBUG nova.network.neutron [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.297039] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780126, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.306887] env[68244]: DEBUG nova.scheduler.client.report [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.455276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.455600] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Instance network_info: |[{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "address": "fa:16:3e:78:36:a6", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb55cce-a3", "ovs_interfaceid": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 743.456187] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:36:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fb55cce-a3f8-40f2-92e8-9f7166bcbf26', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.464150] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating folder: Project (87c0473d09c04fb8a80d27a43c07bef4). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.464870] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddaf6b01-751a-485c-bc7f-2666d6cbaa33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.477481] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created folder: Project (87c0473d09c04fb8a80d27a43c07bef4) in parent group-v558876. [ 743.477757] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating folder: Instances. Parent ref: group-v558966. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.477995] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5c4f0e2-4c01-4b4c-a96e-f33a9b7db018 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.488496] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created folder: Instances in parent group-v558966. [ 743.488789] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.489026] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.489253] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db4ca872-b07c-4a18-8fa9-8753d5fff0e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.513021] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.513021] env[68244]: value = "task-2780129" [ 743.513021] env[68244]: _type = "Task" [ 743.513021] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.515734] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.519675] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780129, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.569165] env[68244]: INFO nova.compute.manager [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Took 46.95 seconds to build instance. [ 743.710718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.798898] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780126, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.815024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.815024] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.817135] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.493s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.817475] env[68244]: DEBUG nova.objects.instance [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 744.023736] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780129, 'name': CreateVM_Task, 'duration_secs': 0.492078} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.024692] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.024969] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.025732] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.026455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 744.026748] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-727b4ab3-c914-4e4b-90e5-7f60a791617a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.031818] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 744.031818] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52522bcd-1cdf-8ec0-f1c2-01d7a7bd9253" [ 744.031818] env[68244]: _type = "Task" [ 744.031818] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.042271] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52522bcd-1cdf-8ec0-f1c2-01d7a7bd9253, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.070917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea79744b-5e38-41ed-83e3-f790f6b803ce tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.462s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.092446] env[68244]: DEBUG nova.compute.manager [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Received event network-changed-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 744.095276] env[68244]: DEBUG nova.compute.manager [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Refreshing instance network info cache due to event network-changed-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 744.095557] env[68244]: DEBUG oslo_concurrency.lockutils [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] Acquiring lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.095822] env[68244]: DEBUG oslo_concurrency.lockutils [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] Acquired lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.095869] env[68244]: DEBUG nova.network.neutron [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Refreshing network info cache for port 4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.298047] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780126, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.497429} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.298326] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] bbc08614-926e-4209-abec-4808f223943a/bbc08614-926e-4209-abec-4808f223943a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.298774] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.298774] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a205424-c494-44ac-a552-9af736509aa2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.306078] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 744.306078] env[68244]: value = "task-2780130" [ 744.306078] env[68244]: _type = "Task" [ 744.306078] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.314600] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.323257] env[68244]: DEBUG nova.compute.utils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.329440] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.329747] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.387545] env[68244]: DEBUG nova.policy [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e084535403fd4ddf8cfc6a8160e9cf2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9c8e956516544ccba9032a48f532d19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.544741] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52522bcd-1cdf-8ec0-f1c2-01d7a7bd9253, 'name': SearchDatastore_Task, 'duration_secs': 0.011674} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.545211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.545543] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.545836] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.546160] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.547037] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 
tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.547037] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66b94389-c21b-4218-a3fb-102b92dba2e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.556101] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.556101] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.557124] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-826a9bef-a321-4eb0-9787-2ef751029166 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.564051] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 744.564051] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f279bd-a52d-80a8-5134-ce66cc40be5e" [ 744.564051] env[68244]: _type = "Task" [ 744.564051] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.577246] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.579915] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f279bd-a52d-80a8-5134-ce66cc40be5e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.621814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.622060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.824233] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065374} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.826620] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.826620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295c9b56-e129-4745-856d-1a9d11644209 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.829066] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.832902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c517069-1d8a-4003-8a2f-2a28686b0d2f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.836620] env[68244]: DEBUG nova.network.neutron [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updated VIF entry in instance network info cache for port 4fb55cce-a3f8-40f2-92e8-9f7166bcbf26. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 744.837401] env[68244]: DEBUG nova.network.neutron [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "address": "fa:16:3e:78:36:a6", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb55cce-a3", "ovs_interfaceid": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.838132] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.216s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.839871] env[68244]: INFO nova.compute.claims [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.869183] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] bbc08614-926e-4209-abec-4808f223943a/bbc08614-926e-4209-abec-4808f223943a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.871025] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5a775f-aec0-4705-b2a9-c9ec67e9b2a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.893270] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 744.893270] env[68244]: value = "task-2780131" [ 744.893270] env[68244]: 
_type = "Task" [ 744.893270] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.903129] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780131, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.927358] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Successfully created port: df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.077124] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f279bd-a52d-80a8-5134-ce66cc40be5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011285} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.078043] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b29d9278-e47e-4fd2-a564-6e254ba32cc5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.094023] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 745.094023] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a39585-ceb8-7978-4585-a5c4daa2f780" [ 745.094023] env[68244]: _type = "Task" [ 745.094023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.102326] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.106799] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a39585-ceb8-7978-4585-a5c4daa2f780, 'name': SearchDatastore_Task, 'duration_secs': 0.011076} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.107327] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.107643] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/e2099d6d-5ab7-4a3e-8034-a3b4fc422749.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 745.107886] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1915b2cd-8afe-4585-8472-c81914e72715 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.115657] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 745.115657] env[68244]: value = "task-2780132" [ 745.115657] env[68244]: _type = "Task" [ 745.115657] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.123757] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780132, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.227020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c72b41-bcdc-4571-83fe-1ea676be20cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.245761] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 745.344343] env[68244]: DEBUG oslo_concurrency.lockutils [req-f563a68f-f072-4f29-8d1b-b53b78078538 req-eac5b501-3e68-478b-a8f1-6bf51a2386d4 service nova] Releasing lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.407359] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780131, 'name': ReconfigVM_Task, 'duration_secs': 0.508734} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.407770] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Reconfigured VM instance instance-00000019 to attach disk [datastore2] bbc08614-926e-4209-abec-4808f223943a/bbc08614-926e-4209-abec-4808f223943a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.408562] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2193896-8e9b-47a9-92ec-80dbcbdff6e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.419244] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 745.419244] env[68244]: value = "task-2780133" [ 745.419244] env[68244]: _type = "Task" [ 745.419244] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.429314] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780133, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.630665] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780132, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.727038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.727366] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.754358] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.754680] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-501d0200-394f-45c8-b688-7fb7bd0050df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.762122] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 745.762122] env[68244]: value = "task-2780134" [ 745.762122] env[68244]: _type = "Task" [ 745.762122] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.773601] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.843741] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.867732] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.867868] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.867990] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.868191] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.868337] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.869280] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.869280] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.869280] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.869280] env[68244]: DEBUG 
nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.869280] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.869469] env[68244]: DEBUG nova.virt.hardware [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.870235] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ea06cc-4b56-43f1-9448-c3683a797ea3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.882074] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db64eca-7db9-4947-a1d4-4a22b782888f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.932944] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780133, 'name': Rename_Task, 'duration_secs': 0.292124} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.936312] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.937609] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5c64a57-9f32-4371-a2ef-b08e6ec76f74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.944581] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 745.944581] env[68244]: value = "task-2780135" [ 745.944581] env[68244]: _type = "Task" [ 745.944581] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.960457] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780135, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.128324] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780132, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.274407] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780134, 'name': PowerOffVM_Task, 'duration_secs': 0.268149} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.274689] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 746.274869] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 746.456586] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780135, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.524417] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d636b31-b229-412e-bca7-f49a7de6346a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.534789] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc203a8-c645-4fd7-80a1-08b7a8d5a273 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.570912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48745aee-0252-4af7-97f9-62feb5c7ae2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.582971] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b071a90d-57ff-4a61-b7d1-fb916ce1d66b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.597963] env[68244]: DEBUG nova.compute.provider_tree [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.631211] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780132, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.781726] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 746.781976] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.782149] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 746.782332] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.782477] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 746.782677] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 746.782824] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 746.782985] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 746.783164] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 746.783326] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 746.783519] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 746.790232] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca861f91-3da0-4f89-99f4-d5295f9d8310 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.810774] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 746.810774] env[68244]: value = "task-2780136" [ 746.810774] env[68244]: _type = "Task" [ 746.810774] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.821941] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780136, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.960995] env[68244]: DEBUG oslo_vmware.api [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780135, 'name': PowerOnVM_Task, 'duration_secs': 1.012161} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.961320] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.961526] env[68244]: INFO nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Took 9.30 seconds to spawn the instance on the hypervisor. 
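The repeated "Task: {'id': task-..., 'name': ...} progress is N%" entries above (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) all come from oslo.vmware's task polling in wait_for_task (oslo_vmware/api.py). Below is a minimal, self-contained sketch of that polling pattern only; the get_task_info callable, the state names, and the poll interval are illustrative stand-ins, not the library's real internals.

```python
import time

# Terminal states reported by a vSphere TaskInfo-like object.
_SUCCESS, _ERROR = "success", "error"


def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info: zero-argument callable returning an object with
    ``state`` (queued/running/success/error), ``progress`` (int or None)
    and ``error`` attributes -- a stand-in for fetching the real TaskInfo.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == _SUCCESS:
            return info                      # e.g. CreateVM_Task completed successfully
        if info.state == _ERROR:
            raise RuntimeError(info.error)   # surface the task fault to the caller
        # Mirrors the "progress is N%" DEBUG lines seen in the log.
        print(f"progress is {info.progress or 0}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```

The real wait_for_task additionally runs inside an oslo.service looping call and translates vSphere faults into oslo.vmware exceptions; the loop-and-poll shape is the same.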
[ 746.961797] env[68244]: DEBUG nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.962555] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e09372-e440-4b1e-a6f1-98ebad8815a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.090353] env[68244]: DEBUG nova.compute.manager [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Received event network-vif-plugged-df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 747.090353] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] Acquiring lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.090353] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.090353] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.090353] env[68244]: DEBUG nova.compute.manager [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] No waiting events found dispatching network-vif-plugged-df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.090353] env[68244]: WARNING nova.compute.manager [req-e9241ccd-78f3-438d-8292-ebf32118dff5 req-0b0c6214-fa4e-48fc-aeea-296057267169 service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Received unexpected event network-vif-plugged-df771f98-b8ac-43c4-8f5b-d09501711a88 for instance with vm_state building and task_state spawning. 
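The network-vif-plugged entries above show the compute manager's external-event handshake: the spawn path registers the events it expects, the Neutron-driven external_instance_event callback pops and signals them, and an event with no registered waiter (as here, while the instance is still in vm_state building / task_state spawning) is logged as unexpected. A simplified sketch of that register/pop pattern follows, using plain threading primitives rather than Nova's real InstanceEvents class; the class and identifiers are illustrative.

```python
import threading


class InstanceEventRegistry:
    """Toy version of the expect/pop handshake for external instance events."""

    def __init__(self):
        self._lock = threading.Lock()
        self._pending = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Spawn path: register interest before triggering the operation."""
        waiter = threading.Event()
        with self._lock:
            self._pending[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        """External-event path: signal the waiter, if any was registered."""
        with self._lock:
            waiter = self._pending.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Matches the WARNING above: nobody was waiting for this event.
            print(f"unexpected event {event_name} for {instance_uuid}")
        else:
            waiter.set()   # wake the thread blocked on waiter.wait()


# Usage: register first, kick off the port binding, then block on the waiter.
registry = InstanceEventRegistry()
waiter = registry.prepare("some-instance-uuid", "network-vif-plugged")
# ... ask Neutron to plug the VIF here ...
registry.pop("some-instance-uuid", "network-vif-plugged")   # callback side
assert waiter.wait(timeout=300)
```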
[ 747.103778] env[68244]: DEBUG nova.scheduler.client.report [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.132666] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780132, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.539613} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.133099] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/e2099d6d-5ab7-4a3e-8034-a3b4fc422749.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 747.133203] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 747.133437] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ed7c44c-7a4e-47c2-b738-c0b4e0e4a73c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.154374] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 747.154374] env[68244]: value = "task-2780137" [ 747.154374] env[68244]: _type = "Task" [ 747.154374] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.157117] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.423501] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780136, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.455322] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Successfully updated port: df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.482675] env[68244]: INFO nova.compute.manager [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Took 49.17 seconds to build instance. [ 747.608063] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.608609] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.611809] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.730s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.611979] env[68244]: DEBUG nova.objects.instance [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'resources' on Instance uuid d73f87d2-41b3-4396-b5b5-932f8c6bf626 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.656998] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.924987] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780136, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.959794] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.959794] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquired lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.959794] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.987922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5027452e-c955-4b0e-a087-df8c91e9bdc6 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.363s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.119567] env[68244]: DEBUG nova.compute.utils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 748.122543] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 748.122543] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.158445] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.981464} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.158445] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.159461] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf45bd7-8859-4456-b6e8-f5d83cee38e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.192657] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/e2099d6d-5ab7-4a3e-8034-a3b4fc422749.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.196179] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e058d819-3e7a-47a4-90c3-3bcb5dbe298f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.221108] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 748.221108] env[68244]: value = "task-2780138" [ 748.221108] env[68244]: _type = "Task" [ 748.221108] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.229593] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.269530] env[68244]: DEBUG nova.policy [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8c3f90f344a45c1861ef7fb32d4bfd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcd37f739a7545e595cd423d24e810bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 748.422853] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780136, 'name': ReconfigVM_Task, 'duration_secs': 1.29319} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.426080] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 748.490209] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 748.506301] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.626196] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.731727] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780138, 'name': ReconfigVM_Task, 'duration_secs': 0.416994} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.734873] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfigured VM instance instance-0000001b to attach disk [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/e2099d6d-5ab7-4a3e-8034-a3b4fc422749.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.736054] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5324af9f-2a76-4d2e-8ade-9c05d29f234e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.743080] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 748.743080] env[68244]: value = "task-2780139" [ 748.743080] env[68244]: _type = "Task" [ 748.743080] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.753785] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780139, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.760042] env[68244]: DEBUG nova.network.neutron [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updating instance_info_cache with network_info: [{"id": "df771f98-b8ac-43c4-8f5b-d09501711a88", "address": "fa:16:3e:d8:21:66", "network": {"id": "dbd842b5-bca6-4e83-b77b-77fc147c26d0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1323392380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c8e956516544ccba9032a48f532d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf771f98-b8", "ovs_interfaceid": "df771f98-b8ac-43c4-8f5b-d09501711a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.895406] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a541c5bc-de94-4762-bf3e-d81fe475aa4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.904920] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e42a6f0-4701-4769-b763-c071781890bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.908103] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Successfully created port: 1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.941050] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.941340] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.941511] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.941703] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.941859] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.942063] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.942309] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.942488] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.942654] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.942830] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.943013] env[68244]: DEBUG nova.virt.hardware [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.948921] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfiguring VM instance instance-00000011 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 748.949491] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-020d698e-6d74-4768-88f6-069234848a87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.964101] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4882e5-33db-42db-bb90-ab163b7181a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.972424] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0572a75b-536a-49cb-8758-58f0395b3284 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.977650] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 748.977650] env[68244]: value = "task-2780140" [ 748.977650] env[68244]: _type = "Task" [ 748.977650] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.989307] env[68244]: DEBUG nova.compute.provider_tree [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.998656] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780140, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.017897] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.255055] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780139, 'name': Rename_Task, 'duration_secs': 0.220981} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.255534] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 749.255894] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c769711-15d0-4daa-8af8-7345ca4c24a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.261546] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Releasing lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.261979] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Instance network_info: |[{"id": "df771f98-b8ac-43c4-8f5b-d09501711a88", "address": "fa:16:3e:d8:21:66", "network": {"id": "dbd842b5-bca6-4e83-b77b-77fc147c26d0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1323392380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c8e956516544ccba9032a48f532d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf771f98-b8", "ovs_interfaceid": "df771f98-b8ac-43c4-8f5b-d09501711a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.263835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:21:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df771f98-b8ac-43c4-8f5b-d09501711a88', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.272700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Creating folder: Project 
(c9c8e956516544ccba9032a48f532d19). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.273202] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 749.273202] env[68244]: value = "task-2780141" [ 749.273202] env[68244]: _type = "Task" [ 749.273202] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.273928] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a62965f-6328-4b86-bcba-67a7f90cdcad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.284737] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780141, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.288402] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Created folder: Project (c9c8e956516544ccba9032a48f532d19) in parent group-v558876. [ 749.288402] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Creating folder: Instances. Parent ref: group-v558969. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.288402] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8526e26-fa24-4b7c-87ee-ce0423c4f118 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.297894] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Created folder: Instances in parent group-v558969. [ 749.298180] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.298398] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.298626] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-164dad8a-5bf6-4d72-a2d1-5cf8f0799b18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.318833] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.318833] env[68244]: value = "task-2780144" [ 749.318833] env[68244]: _type = "Task" [ 749.318833] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.328190] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780144, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.488494] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780140, 'name': ReconfigVM_Task, 'duration_secs': 0.199814} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.488839] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfigured VM instance instance-00000011 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 749.489950] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d754e65b-561e-48f9-a982-f934607d9b48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.493274] env[68244]: DEBUG nova.scheduler.client.report [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.528852] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.530127] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8558496-8987-4161-bd6a-42b26b6843ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.552827] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 749.552827] env[68244]: value = "task-2780145" [ 749.552827] env[68244]: _type = "Task" [ 749.552827] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.562036] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.584134] env[68244]: DEBUG nova.compute.manager [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Received event network-changed-df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.584387] env[68244]: DEBUG nova.compute.manager [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Refreshing instance network info cache due to event network-changed-df771f98-b8ac-43c4-8f5b-d09501711a88. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 749.584587] env[68244]: DEBUG oslo_concurrency.lockutils [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] Acquiring lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.584728] env[68244]: DEBUG oslo_concurrency.lockutils [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] Acquired lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.584891] env[68244]: DEBUG nova.network.neutron [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Refreshing network info cache for port df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.637836] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.662776] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.663179] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.663381] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.663578] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.663757] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.663951] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.664232] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.664411] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 749.664590] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.664754] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.664928] env[68244]: DEBUG nova.virt.hardware [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.666217] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026fee07-d624-4fed-8118-0f8359244891 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.674670] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1a0d94-cbbc-41cf-95c3-1f1114a8c2b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.800222] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780141, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.831903] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780144, 'name': CreateVM_Task, 'duration_secs': 0.414775} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.832216] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.833332] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.833645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.834141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.834534] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-182b9ea4-a29e-4d2d-a26b-179ea5cce7b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.841409] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 749.841409] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e80b66-6669-1e87-0348-e9b9c5bbd5f1" [ 749.841409] env[68244]: _type = "Task" [ 749.841409] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.851289] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e80b66-6669-1e87-0348-e9b9c5bbd5f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.930822] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 749.931841] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3acd74-a83b-4dc9-933d-8760d237e254 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.938181] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 749.938462] env[68244]: ERROR oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk due to incomplete transfer. [ 749.938587] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-07f5fa55-ba5a-4d82-af3d-90c419a7768e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.945607] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fccbbf-01a5-fe14-035c-def3ce2065a5/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 749.945792] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Uploaded image 0428e551-b5ef-4ca7-9b75-5cb0cdbb76f1 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 749.947811] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 749.948120] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3ed19b4e-4484-4876-b910-15e2bd11330c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.954919] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 749.954919] env[68244]: value = "task-2780146" [ 749.954919] env[68244]: _type = "Task" [ 749.954919] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.962951] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780146, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.999293] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.387s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.002013] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.250s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.002250] env[68244]: DEBUG nova.objects.instance [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lazy-loading 'resources' on Instance uuid 03af8758-fba3-4173-b998-d9e6b3113f8c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.034246] env[68244]: INFO nova.scheduler.client.report [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocations for instance d73f87d2-41b3-4396-b5b5-932f8c6bf626 [ 750.063497] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780145, 'name': ReconfigVM_Task, 'duration_secs': 0.48293} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.063829] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc/10957648-8618-4f2c-8b08-5468bca20cfc.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.064156] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 750.288757] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780141, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.295466] env[68244]: DEBUG nova.network.neutron [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updated VIF entry in instance network info cache for port df771f98-b8ac-43c4-8f5b-d09501711a88. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.295850] env[68244]: DEBUG nova.network.neutron [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updating instance_info_cache with network_info: [{"id": "df771f98-b8ac-43c4-8f5b-d09501711a88", "address": "fa:16:3e:d8:21:66", "network": {"id": "dbd842b5-bca6-4e83-b77b-77fc147c26d0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1323392380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c8e956516544ccba9032a48f532d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf771f98-b8", "ovs_interfaceid": "df771f98-b8ac-43c4-8f5b-d09501711a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.350779] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e80b66-6669-1e87-0348-e9b9c5bbd5f1, 'name': SearchDatastore_Task, 'duration_secs': 0.01621} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.351115] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.351350] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.351583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.351731] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.351910] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.352201] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-401e59ca-428a-4a0d-bbaa-334b15cece5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.361009] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.361216] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 750.361945] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eb7ee72-dacf-4b6d-960a-496036288c1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.366859] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 750.366859] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5257f9f7-f980-a165-a52b-31bc43d4dc51" [ 750.366859] env[68244]: _type = "Task" [ 750.366859] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.374795] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5257f9f7-f980-a165-a52b-31bc43d4dc51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.468352] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780146, 'name': Destroy_Task, 'duration_secs': 0.350383} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.468743] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Destroyed the VM [ 750.469208] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 750.469547] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d1d1d8c6-9794-4752-b111-533c5701124d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.476653] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 750.476653] env[68244]: value = "task-2780147" [ 750.476653] env[68244]: _type = "Task" [ 750.476653] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.487579] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780147, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.542558] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c23f0f4c-a74b-45b0-974e-6ab37fc32961 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "d73f87d2-41b3-4396-b5b5-932f8c6bf626" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.474s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.571734] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cd494c-3e7f-481d-aa24-c5aee78e5b52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.600064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d796f816-766e-4015-8977-ed287cf983ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.621445] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 750.799920] env[68244]: DEBUG oslo_vmware.api [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780141, 'name': PowerOnVM_Task, 'duration_secs': 1.051582} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.800763] env[68244]: DEBUG oslo_concurrency.lockutils [req-9bbe78e2-bf61-4888-b316-1f889103f050 req-84876157-980e-45ad-9711-d26ab84ced2f service nova] Releasing lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.801442] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 750.801884] env[68244]: INFO nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Took 10.47 seconds to spawn the instance on the hypervisor. 
[ 750.802226] env[68244]: DEBUG nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 750.805608] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6a5ddc-bcac-4dfe-b696-b71ddca22436 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.877133] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5257f9f7-f980-a165-a52b-31bc43d4dc51, 'name': SearchDatastore_Task, 'duration_secs': 0.009346} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.878043] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc71e30-4e6e-4d4a-aaea-7eb1aa19d3a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.887425] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 750.887425] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281485b-3e4d-9dd1-14a6-5028947e752c" [ 750.887425] env[68244]: _type = "Task" [ 750.887425] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.898163] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281485b-3e4d-9dd1-14a6-5028947e752c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.940563] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Successfully updated port: 1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.986030] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780147, 'name': RemoveSnapshot_Task} progress is 26%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.173470] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc142966-fdab-4511-af9c-df95f030ddfd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.181685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b950fc-d1d6-467b-a727-1fcecca82a8b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.218510] env[68244]: DEBUG nova.network.neutron [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Port 4bc0d0f1-ef11-425c-987c-514c9b55015f binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 751.220536] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc99b0f-545e-468b-8241-c408c4564b7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.229435] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387bbddf-6e12-4208-9b86-7433ac18c7a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.246552] env[68244]: DEBUG nova.compute.provider_tree [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.328931] env[68244]: INFO nova.compute.manager [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Took 45.81 seconds to build instance. [ 751.402427] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281485b-3e4d-9dd1-14a6-5028947e752c, 'name': SearchDatastore_Task, 'duration_secs': 0.01176} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.402767] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.404823] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b0090ea8-98fe-42a0-97cc-40d7578851a9/b0090ea8-98fe-42a0-97cc-40d7578851a9.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.405125] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e614ac7b-ed28-47a4-8db0-370c868a057b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.413487] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 751.413487] env[68244]: value = "task-2780148" [ 751.413487] env[68244]: _type = "Task" [ 751.413487] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.425619] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780148, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.447842] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.447934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.448122] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.486647] env[68244]: DEBUG oslo_vmware.api [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780147, 'name': RemoveSnapshot_Task, 'duration_secs': 0.757319} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.486902] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 751.487131] env[68244]: INFO nova.compute.manager [None req-c10f3f20-9397-40cc-b47e-d94ebbe8836c tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 19.92 seconds to snapshot the instance on the hypervisor. 
[ 751.753216] env[68244]: DEBUG nova.scheduler.client.report [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.799767] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.799849] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing instance network info cache due to event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 751.800109] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Acquiring lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.800295] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Acquired lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.800470] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.831254] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79066636-5c3d-4ea6-b85d-d4c14bfb0224 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.319s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.925470] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780148, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455234} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.925736] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b0090ea8-98fe-42a0-97cc-40d7578851a9/b0090ea8-98fe-42a0-97cc-40d7578851a9.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.925950] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.926224] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7d0cd13-ea6f-4c87-9282-367acfe35f77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.933818] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 751.933818] env[68244]: value = "task-2780149" [ 751.933818] env[68244]: _type = "Task" [ 751.933818] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.941712] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780149, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.012145] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.245715] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.245953] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.246140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.259517] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.258s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.261663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.244s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.264404] env[68244]: INFO nova.compute.claims [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.277857] env[68244]: DEBUG nova.network.neutron [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Updating instance_info_cache with network_info: [{"id": "1831bead-f9d8-4019-b2a9-1d401a809acf", "address": "fa:16:3e:41:a6:38", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1831bead-f9", "ovs_interfaceid": "1831bead-f9d8-4019-b2a9-1d401a809acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.286831] env[68244]: INFO nova.scheduler.client.report [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted allocations for instance 03af8758-fba3-4173-b998-d9e6b3113f8c [ 752.333274] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 752.444556] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780149, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065351} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.444849] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.445670] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9889ec5d-dfcb-4f8f-a54d-bfc7f1106f72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.483859] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] b0090ea8-98fe-42a0-97cc-40d7578851a9/b0090ea8-98fe-42a0-97cc-40d7578851a9.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.487183] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02910a4b-a2d9-4e4a-9ebe-920902dcc0d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.516645] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 752.516645] env[68244]: value = "task-2780150" [ 752.516645] env[68244]: _type = "Task" [ 752.516645] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.527022] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.779179] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updated VIF entry in instance network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.779179] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updating instance_info_cache with network_info: [{"id": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "address": "fa:16:3e:a2:26:1a", "network": {"id": "684a2004-5112-4bec-8575-c61b6101134d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1965396615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef5fff09e8174d0c8c09e5efc2164ab2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01eeb7b1-e8", "ovs_interfaceid": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.780150] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.780405] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Instance network_info: |[{"id": "1831bead-f9d8-4019-b2a9-1d401a809acf", "address": "fa:16:3e:41:a6:38", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1831bead-f9", "ovs_interfaceid": "1831bead-f9d8-4019-b2a9-1d401a809acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 752.780757] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:a6:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1831bead-f9d8-4019-b2a9-1d401a809acf', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.788792] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.790035] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.790035] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-114bb144-9055-444b-bc0b-96049f7deffb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.806897] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3231db8a-f1c6-4947-ab09-69407133b7f3 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "03af8758-fba3-4173-b998-d9e6b3113f8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.139s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.813034] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.813034] env[68244]: value = "task-2780151" [ 752.813034] env[68244]: _type = "Task" [ 752.813034] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.822410] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780151, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.857023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.026672] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.285055] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Releasing lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.288443] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Received event network-vif-plugged-1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 753.288443] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Acquiring lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.288443] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.288443] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.288443] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] No waiting events found dispatching network-vif-plugged-1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.288443] env[68244]: WARNING nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Received unexpected event network-vif-plugged-1831bead-f9d8-4019-b2a9-1d401a809acf for instance with vm_state building and task_state spawning. 
[ 753.289035] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Received event network-changed-1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 753.289035] env[68244]: DEBUG nova.compute.manager [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Refreshing instance network info cache due to event network-changed-1831bead-f9d8-4019-b2a9-1d401a809acf. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 753.289035] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Acquiring lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.289467] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Acquired lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.289467] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Refreshing network info cache for port 1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.324198] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780151, 'name': CreateVM_Task, 'duration_secs': 0.352036} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.324401] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 753.325189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.325457] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.325803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 753.326403] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1f94a5a-807c-4890-a0f2-8de242c94cbd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.332112] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 753.332112] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c9e125-38d7-7849-badc-83b073832ecd" [ 753.332112] env[68244]: _type = "Task" [ 753.332112] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.345894] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c9e125-38d7-7849-badc-83b073832ecd, 'name': SearchDatastore_Task, 'duration_secs': 0.010644} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.346239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.346436] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.346684] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.347155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.347361] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.347725] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83a2e687-4bde-4b1b-8a21-91a1b7f8a279 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.355466] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.355757] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.361046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.361046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.361046] env[68244]: DEBUG nova.network.neutron [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.361374] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78d9a203-0fc4-4364-981c-cf752dcf7e17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.366752] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 753.366752] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b9c51-bb17-ea51-f880-ad3216984d6d" [ 753.366752] env[68244]: _type = "Task" [ 753.366752] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.378036] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b9c51-bb17-ea51-f880-ad3216984d6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.527638] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780150, 'name': ReconfigVM_Task, 'duration_secs': 0.679904} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.530086] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Reconfigured VM instance instance-0000001c to attach disk [datastore2] b0090ea8-98fe-42a0-97cc-40d7578851a9/b0090ea8-98fe-42a0-97cc-40d7578851a9.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.530915] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f6cf648-8277-4da9-8a71-ba3db462d8e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.537484] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 753.537484] env[68244]: value = "task-2780152" [ 753.537484] env[68244]: _type = "Task" [ 753.537484] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.547673] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780152, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.884733] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b9c51-bb17-ea51-f880-ad3216984d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.008473} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.886033] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7f5baa-03f2-4f3b-a209-f3bcf1cad003 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.892843] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 753.892843] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a47b0f-8f08-a7f8-71ca-5f6742d9097b" [ 753.892843] env[68244]: _type = "Task" [ 753.892843] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.904716] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a47b0f-8f08-a7f8-71ca-5f6742d9097b, 'name': SearchDatastore_Task, 'duration_secs': 0.009379} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.904716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.904973] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 09ab8712-0f7a-4122-9d61-19da3e65d22b/09ab8712-0f7a-4122-9d61-19da3e65d22b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.905687] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc7ff72e-a346-4462-a5cb-bf6b0f0af91a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.916353] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 753.916353] env[68244]: value = "task-2780153" [ 753.916353] env[68244]: _type = "Task" [ 753.916353] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.924874] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780153, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.942177] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603426a8-3eb4-4f74-8a3a-dd6260974450 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.950937] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8262c235-2472-4977-ba4c-2d6f234002ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.990053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1885433-6789-47dc-a89b-316f3e2ced4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.998744] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87025e4c-986f-448c-960a-a6b51fb40774 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.013922] env[68244]: DEBUG nova.compute.provider_tree [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.048678] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780152, 'name': Rename_Task, 'duration_secs': 0.373137} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.051920] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.052510] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-372dfcb3-0cb9-43c3-8165-8e9d295735c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.062415] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 754.062415] env[68244]: value = "task-2780154" [ 754.062415] env[68244]: _type = "Task" [ 754.062415] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.073856] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780154, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.126511] env[68244]: DEBUG nova.network.neutron [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.252169] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Updated VIF entry in instance network info cache for port 1831bead-f9d8-4019-b2a9-1d401a809acf. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.252559] env[68244]: DEBUG nova.network.neutron [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Updating instance_info_cache with network_info: [{"id": "1831bead-f9d8-4019-b2a9-1d401a809acf", "address": "fa:16:3e:41:a6:38", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1831bead-f9", "ovs_interfaceid": "1831bead-f9d8-4019-b2a9-1d401a809acf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.358993] env[68244]: DEBUG nova.compute.manager [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 754.358993] env[68244]: DEBUG nova.compute.manager [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing instance network info cache due to event network-changed-01eeb7b1-e851-425f-933e-fdd80ac8564b. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 754.358993] env[68244]: DEBUG oslo_concurrency.lockutils [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] Acquiring lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.358993] env[68244]: DEBUG oslo_concurrency.lockutils [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] Acquired lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.359706] env[68244]: DEBUG nova.network.neutron [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Refreshing network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.425505] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780153, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48608} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.425764] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 09ab8712-0f7a-4122-9d61-19da3e65d22b/09ab8712-0f7a-4122-9d61-19da3e65d22b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.425976] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.426233] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98b200d5-6936-4d36-8931-79effd4bd8d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.432851] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 754.432851] env[68244]: value = "task-2780155" [ 754.432851] env[68244]: _type = "Task" [ 754.432851] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.441162] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780155, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.519094] env[68244]: DEBUG nova.scheduler.client.report [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.573714] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780154, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.629639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.755633] env[68244]: DEBUG oslo_concurrency.lockutils [req-19dc2fda-b51d-4917-b1fc-fdfca48340be req-7455ae93-6d98-4383-83d0-7c02848deadb service nova] Releasing lock "refresh_cache-09ab8712-0f7a-4122-9d61-19da3e65d22b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.943953] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059153} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.944249] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.945096] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb04348d-fecc-4826-9e5a-cd944deae10d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.967913] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 09ab8712-0f7a-4122-9d61-19da3e65d22b/09ab8712-0f7a-4122-9d61-19da3e65d22b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.968250] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-138ddb59-03f7-455c-9fc5-6882c0448c52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.987851] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.988177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.988431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "59b0dd89-0093-4e50-9428-8db5c7fd429d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.988903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.988903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 
tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.991427] env[68244]: INFO nova.compute.manager [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Terminating instance [ 754.999271] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 754.999271] env[68244]: value = "task-2780156" [ 754.999271] env[68244]: _type = "Task" [ 754.999271] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.012607] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780156, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.024945] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.025557] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 755.028795] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.221s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.030382] env[68244]: INFO nova.compute.claims [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.073988] env[68244]: DEBUG oslo_vmware.api [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780154, 'name': PowerOnVM_Task, 'duration_secs': 1.001864} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.074363] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.074630] env[68244]: INFO nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Took 9.23 seconds to spawn the instance on the hypervisor. [ 755.074763] env[68244]: DEBUG nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.075742] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc01506-5a15-48f9-aeb9-e1225c0039b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.153305] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1392a781-797c-4cdf-875c-e0f64c799006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.174017] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f34c0e-34f0-4e2c-b2b9-f5d12692f1d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.180152] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 755.250108] env[68244]: DEBUG nova.network.neutron [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updated VIF entry in instance network info cache for port 01eeb7b1-e851-425f-933e-fdd80ac8564b. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.250483] env[68244]: DEBUG nova.network.neutron [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Updating instance_info_cache with network_info: [{"id": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "address": "fa:16:3e:a2:26:1a", "network": {"id": "684a2004-5112-4bec-8575-c61b6101134d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1965396615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef5fff09e8174d0c8c09e5efc2164ab2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4ad9ee0f-6a58-4a7b-bda3-5249b8cef84e", "external-id": "nsx-vlan-transportzone-354", "segmentation_id": 354, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01eeb7b1-e8", "ovs_interfaceid": "01eeb7b1-e851-425f-933e-fdd80ac8564b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.496076] env[68244]: DEBUG nova.compute.manager [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.496362] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.497108] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d1a5ef-322b-4e9a-ae11-a3360a9a192e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.508358] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780156, 'name': ReconfigVM_Task, 'duration_secs': 0.276345} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.510726] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 09ab8712-0f7a-4122-9d61-19da3e65d22b/09ab8712-0f7a-4122-9d61-19da3e65d22b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.511084] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.511263] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65defb02-41fe-44d6-a547-c155aca208ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.512794] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d3ccf4f-3c6b-4b0f-bebb-b31500cc8ea3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.518589] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 755.518589] env[68244]: value = "task-2780157" [ 755.518589] env[68244]: _type = "Task" [ 755.518589] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.522958] env[68244]: DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 755.522958] env[68244]: value = "task-2780158" [ 755.522958] env[68244]: _type = "Task" [ 755.522958] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.529178] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780157, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.536446] env[68244]: DEBUG nova.compute.utils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.539687] env[68244]: DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780158, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.540289] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 755.541037] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.585830] env[68244]: DEBUG nova.policy [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cddbad2016a4b98b5c05082a13f59f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c0473d09c04fb8a80d27a43c07bef4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.595278] env[68244]: INFO nova.compute.manager [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Took 46.67 seconds to build instance. [ 755.686668] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.686993] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef904a97-eb32-4644-b8b5-99e0709956e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.695328] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 755.695328] env[68244]: value = "task-2780159" [ 755.695328] env[68244]: _type = "Task" [ 755.695328] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.703703] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780159, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.753577] env[68244]: DEBUG oslo_concurrency.lockutils [req-d11f5eba-3016-4d35-9d1a-532b8a0aca75 req-62782380-939f-470f-a98c-3b6e26a39396 service nova] Releasing lock "refresh_cache-bbc08614-926e-4209-abec-4808f223943a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.793567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "bbc08614-926e-4209-abec-4808f223943a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.795059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.795059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "bbc08614-926e-4209-abec-4808f223943a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.795059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.795059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.797156] env[68244]: INFO nova.compute.manager [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Terminating instance [ 756.010238] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Successfully created port: a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.031707] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780157, 'name': Rename_Task, 'duration_secs': 0.218628} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.033291] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.038102] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dacd38a-bb89-469e-add8-91720d6656c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.040359] env[68244]: DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780158, 'name': PowerOffVM_Task, 'duration_secs': 0.228497} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.043907] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.046737] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.047122] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.048197] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f588c60-69ef-4173-9444-5e5a895b7142 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.053552] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 756.053552] env[68244]: value = "task-2780160" [ 756.053552] env[68244]: _type = "Task" [ 756.053552] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.063080] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780160, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.077088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.097150] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46faaab1-911c-43d8-b375-ec544b98bd9f tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.075s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.098395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.022s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.098580] env[68244]: INFO nova.compute.manager [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Rebooting instance [ 756.117056] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.117650] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.117768] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Deleting the datastore file [datastore2] 59b0dd89-0093-4e50-9428-8db5c7fd429d {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.117933] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2896c90-f770-45dc-bca6-cddedb8a8afe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.125890] env[68244]: 
DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for the task: (returnval){ [ 756.125890] env[68244]: value = "task-2780162" [ 756.125890] env[68244]: _type = "Task" [ 756.125890] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.135227] env[68244]: DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.208383] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780159, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.301576] env[68244]: DEBUG nova.compute.manager [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 756.301948] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.303145] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c79a762-1baf-4d36-95c0-9ce8d49e1598 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.315148] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.315776] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3823b23e-34c6-48be-9a18-7d45db70501a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.322098] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 756.322098] env[68244]: value = "task-2780163" [ 756.322098] env[68244]: _type = "Task" [ 756.322098] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.330532] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.565767] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780160, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.593453] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bcce5d-aa82-4c2a-b198-cc48e51adf41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.601648] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 756.617439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e43907-93bd-4f56-a841-8ca11ea67a68 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.650806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.650977] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquired lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.651157] env[68244]: DEBUG nova.network.neutron [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.655628] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8df2fa-d2c5-4741-9ddd-60039bfc9c0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.666219] env[68244]: DEBUG oslo_vmware.api [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Task: {'id': task-2780162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145389} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.668445] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.668647] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.668826] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.668997] env[68244]: INFO nova.compute.manager [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 756.669258] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.669556] env[68244]: DEBUG nova.compute.manager [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.669667] env[68244]: DEBUG nova.network.neutron [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.672310] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df1219c-c164-4657-bbaa-9e4733f217c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.688261] env[68244]: DEBUG nova.compute.provider_tree [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.706432] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780159, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.831659] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780163, 'name': PowerOffVM_Task, 'duration_secs': 0.204467} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.831923] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.834271] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.834271] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5aa85fb6-1a1e-4a51-a317-d19b9d22b2e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.877067] env[68244]: DEBUG nova.network.neutron [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updating instance_info_cache with network_info: [{"id": "df771f98-b8ac-43c4-8f5b-d09501711a88", "address": "fa:16:3e:d8:21:66", "network": {"id": "dbd842b5-bca6-4e83-b77b-77fc147c26d0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1323392380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c8e956516544ccba9032a48f532d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf771f98-b8", "ovs_interfaceid": "df771f98-b8ac-43c4-8f5b-d09501711a88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.902220] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
756.902443] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.902622] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Deleting the datastore file [datastore2] bbc08614-926e-4209-abec-4808f223943a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.902884] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c7a635f-d6bc-40f0-831c-1261a18047b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.910227] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for the task: (returnval){ [ 756.910227] env[68244]: value = "task-2780165" [ 756.910227] env[68244]: _type = "Task" [ 756.910227] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.918512] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780165, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.056424] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.068738] env[68244]: DEBUG oslo_vmware.api [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780160, 'name': PowerOnVM_Task, 'duration_secs': 0.524019} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.069032] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.069236] env[68244]: INFO nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Took 7.43 seconds to spawn the instance on the hypervisor. 
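The records above repeatedly show the same lifecycle for long-running vSphere operations (PowerOnVM_Task, ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task): a "Waiting for the task" line, zero or more "progress is N%" poll lines, then a "completed successfully" line carrying `duration_secs`. The sketch below is a minimal, hypothetical plain-Python illustration of that poll-until-done pattern, not the oslo.vmware implementation; the `fetch_task_state` callable and `TaskInfo` type are assumptions introduced only for this example.

```python
# Hypothetical sketch of the poll-until-complete cycle visible in the
# wait_for_task/_poll_task records above. `fetch_task_state` is an assumed
# callable that would query the backend (e.g. vCenter) for task status;
# it is not part of oslo.vmware.
import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    task_id: str   # e.g. "task-2780154"
    name: str      # e.g. "PowerOnVM_Task"
    state: str     # "running", "success" or "error"
    progress: int  # 0-100, as in the "progress is 88%" lines


def wait_for_task(task_id: str,
                  fetch_task_state: Callable[[str], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a long-running task until it finishes or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_state(task_id)
        if info.state == "success":
            return info  # caller would log "completed successfully"
        if info.state == "error":
            raise RuntimeError(f"{info.name} ({info.task_id}) failed")
        if time.monotonic() > deadline:
            raise TimeoutError(f"{info.name} ({info.task_id}) timed out")
        # Corresponds to the intermediate "progress is N%" lines in the log.
        print(f"Task {info.task_id} ({info.name}) progress is {info.progress}%")
        time.sleep(poll_interval)
```

In the log each task id goes through exactly this cycle, and the final poll reports the measured duration (for example task-2780154 completing with 'duration_secs': 1.001864).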
[ 757.069416] env[68244]: DEBUG nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.070205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808e0844-3b7f-4be9-aefc-860d8b3cbd6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.092604] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.092832] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.092981] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.093171] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.093313] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 757.093496] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.093736] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 
tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.093892] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.094073] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.094242] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.094414] env[68244]: DEBUG nova.virt.hardware [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.095304] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57f9faa-47a4-43ba-85f9-c3b7378f5cc2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.099739] env[68244]: DEBUG nova.compute.manager [req-0adcc3a6-e57f-4624-b7c9-e4e43321f9fb req-2445bf93-fe2c-4025-87a3-7bfdbf75a62f service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Received event network-vif-deleted-454e7dd1-22ef-4014-9597-5df4c82d0759 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 757.100350] env[68244]: INFO nova.compute.manager [req-0adcc3a6-e57f-4624-b7c9-e4e43321f9fb req-2445bf93-fe2c-4025-87a3-7bfdbf75a62f service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Neutron deleted interface 454e7dd1-22ef-4014-9597-5df4c82d0759; detaching it from the instance and deleting it from the info cache [ 757.100350] env[68244]: DEBUG nova.network.neutron [req-0adcc3a6-e57f-4624-b7c9-e4e43321f9fb req-2445bf93-fe2c-4025-87a3-7bfdbf75a62f service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.107748] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad015f95-ccb0-46c5-b1ad-60d16acd656b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.141718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.191384] env[68244]: DEBUG nova.scheduler.client.report [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.207460] env[68244]: DEBUG oslo_vmware.api [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780159, 'name': PowerOnVM_Task, 'duration_secs': 1.37304} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.208375] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.208570] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f62b3647-da14-4ca7-83b8-9fac7a8b24f0 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance '10957648-8618-4f2c-8b08-5468bca20cfc' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 757.381929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Releasing lock "refresh_cache-b0090ea8-98fe-42a0-97cc-40d7578851a9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.419904] env[68244]: DEBUG oslo_vmware.api [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Task: {'id': task-2780165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14275} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.420147] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.420330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.420507] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.420678] env[68244]: INFO nova.compute.manager [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] [instance: bbc08614-926e-4209-abec-4808f223943a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 757.420924] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.421228] env[68244]: DEBUG nova.compute.manager [-] [instance: bbc08614-926e-4209-abec-4808f223943a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.421228] env[68244]: DEBUG nova.network.neutron [-] [instance: bbc08614-926e-4209-abec-4808f223943a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.544526] env[68244]: DEBUG nova.network.neutron [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.593027] env[68244]: INFO nova.compute.manager [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Took 42.00 seconds to build instance. 
[ 757.604399] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91eee720-c2f9-4cb1-b943-6a306bf6aa54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.614311] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a832a4-5147-4305-b1ea-f5cfa474886e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.647747] env[68244]: DEBUG nova.compute.manager [req-0adcc3a6-e57f-4624-b7c9-e4e43321f9fb req-2445bf93-fe2c-4025-87a3-7bfdbf75a62f service nova] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Detach interface failed, port_id=454e7dd1-22ef-4014-9597-5df4c82d0759, reason: Instance 59b0dd89-0093-4e50-9428-8db5c7fd429d could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 757.696743] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.697385] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 757.700276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.162s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.700490] env[68244]: DEBUG nova.objects.instance [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lazy-loading 'resources' on Instance uuid fe873e92-1481-4c5f-b4ca-90e052bd10c0 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.887368] env[68244]: DEBUG nova.compute.manager [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.887533] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a4b0b4-60fb-4bb2-93c2-a66b1efc6308 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.894403] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Successfully updated port: a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 758.047914] env[68244]: INFO nova.compute.manager [-] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Took 1.38 seconds to deallocate network for instance. [ 758.095243] env[68244]: DEBUG oslo_concurrency.lockutils [None req-60511bb7-ae9e-45ff-ae71-c585608e5dc7 tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.475s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.206816] env[68244]: DEBUG nova.compute.utils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 758.208468] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 758.208614] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.311868] env[68244]: DEBUG nova.policy [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fd06cb45af94bb88bcf0e4399fe5265', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de3029d574fa4130b5fbbf34d2e39668', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 758.397938] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.398126] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.398306] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 
7778c027-d4af-436c-a545-aa513c0b1127] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.399797] env[68244]: DEBUG nova.network.neutron [-] [instance: bbc08614-926e-4209-abec-4808f223943a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.554991] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.598248] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.647907] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfe0942-914a-4429-92c9-b29ddf11fd8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.657933] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Suspending the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 758.657933] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ead17dcf-6640-47f4-b4e1-c6a1680704e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.668652] env[68244]: DEBUG oslo_vmware.api [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] Waiting for the task: (returnval){ [ 758.668652] env[68244]: value = "task-2780166" [ 758.668652] env[68244]: _type = "Task" [ 758.668652] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.678243] env[68244]: DEBUG oslo_vmware.api [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] Task: {'id': task-2780166, 'name': SuspendVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.716177] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 758.769206] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Successfully created port: e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.775585] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9234313a-2e05-48c0-88c8-de6eefbe1a0c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.783084] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d371b965-e259-4927-930e-499b9aa5f94a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.816706] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d71096-5bd4-422a-9869-dc525d27b9f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.824178] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa1962e-a0e0-4bb7-9715-812c302e6c4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.838073] env[68244]: DEBUG nova.compute.provider_tree [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.904480] env[68244]: INFO nova.compute.manager [-] [instance: bbc08614-926e-4209-abec-4808f223943a] Took 1.48 seconds to deallocate network for instance. [ 758.913186] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5433ba91-4669-47be-95fa-ceb83e8f577b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.927506] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Doing hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 758.927894] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0c5b2b5d-15be-49cc-b149-09adb9f41466 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.935910] env[68244]: DEBUG oslo_vmware.api [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 758.935910] env[68244]: value = "task-2780167" [ 758.935910] env[68244]: _type = "Task" [ 758.935910] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.947367] env[68244]: DEBUG oslo_vmware.api [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780167, 'name': ResetVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.992220] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.123036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.179932] env[68244]: DEBUG oslo_vmware.api [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] Task: {'id': task-2780166, 'name': SuspendVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.341863] env[68244]: DEBUG nova.scheduler.client.report [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.424362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.452879] env[68244]: DEBUG oslo_vmware.api [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780167, 'name': ResetVM_Task, 'duration_secs': 0.100561} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.453153] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Did hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 759.453341] env[68244]: DEBUG nova.compute.manager [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.454124] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29326900-9d42-41a0-acc0-96eb645bf310 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.530092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.531091] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.531091] env[68244]: DEBUG nova.compute.manager [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Going to confirm migration 1 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 759.568159] env[68244]: DEBUG nova.network.neutron [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Updating instance_info_cache with network_info: [{"id": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "address": "fa:16:3e:c6:34:d3", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapa3b7e2c4-a3", "ovs_interfaceid": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.679560] env[68244]: DEBUG oslo_vmware.api [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] Task: {'id': task-2780166, 'name': SuspendVM_Task, 'duration_secs': 0.657068} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.679901] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Suspended the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 759.680043] env[68244]: DEBUG nova.compute.manager [None req-bf99890b-36d5-4bc1-9d93-04c077bda6d2 tempest-ServersAdminNegativeTestJSON-1094011989 tempest-ServersAdminNegativeTestJSON-1094011989-project-admin] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.680852] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c5cedd-80b0-453c-b402-4353d5d6c21d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.727279] env[68244]: DEBUG nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Received event network-vif-plugged-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 759.727330] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Acquiring lock "7778c027-d4af-436c-a545-aa513c0b1127-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.727501] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Lock "7778c027-d4af-436c-a545-aa513c0b1127-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.727681] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Lock "7778c027-d4af-436c-a545-aa513c0b1127-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.727875] env[68244]: DEBUG nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] No waiting events found dispatching 
network-vif-plugged-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 759.728018] env[68244]: WARNING nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Received unexpected event network-vif-plugged-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a for instance with vm_state building and task_state spawning. [ 759.728196] env[68244]: DEBUG nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Received event network-changed-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 759.728349] env[68244]: DEBUG nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Refreshing instance network info cache due to event network-changed-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 759.728511] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Acquiring lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.729905] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 759.761056] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 759.761230] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.761428] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 759.761624] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.761769] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 759.761941] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 759.762711] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 759.762949] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 759.763217] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 759.763449] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 759.763686] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 759.765394] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02600d70-875e-41ba-ad75-489065db7c9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.774547] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07336582-7c71-4130-b27a-aedab6de11e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.848180] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.850535] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.312s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.850801] env[68244]: DEBUG nova.objects.instance [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lazy-loading 'resources' on Instance uuid cb607c5e-797d-4e52-9ba4-66113718dacc {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 759.875411] env[68244]: INFO nova.scheduler.client.report [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted allocations for instance fe873e92-1481-4c5f-b4ca-90e052bd10c0 [ 759.970905] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f354a69c-4010-48e6-8f15-5f1c6ac0bb18 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.872s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.070019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.070487] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Instance network_info: |[{"id": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "address": "fa:16:3e:c6:34:d3", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b7e2c4-a3", "ovs_interfaceid": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 760.070711] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Acquired lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.071175] env[68244]: DEBUG nova.network.neutron [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Refreshing network info cache for port a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.072275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:34:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.080430] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 
tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 760.080905] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.081610] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdb47af1-077e-480c-b671-5e74672be2d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.104805] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.104805] env[68244]: value = "task-2780168" [ 760.104805] env[68244]: _type = "Task" [ 760.104805] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.109127] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.109330] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.109501] env[68244]: DEBUG nova.network.neutron [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.109679] env[68244]: DEBUG nova.objects.instance [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lazy-loading 'info_cache' on Instance uuid 10957648-8618-4f2c-8b08-5468bca20cfc {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.113639] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780168, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.384716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-153f7059-5f34-4de5-b23c-8869c346fcb2 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "fe873e92-1481-4c5f-b4ca-90e052bd10c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.240s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.613268] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780168, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.868075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45198cc0-6eae-4cb2-bb62-1c7541864ab2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.876099] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2805dd-1f4e-4a6c-b916-5e4368507b53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.906685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab7ef2c-23ca-449e-b536-173309dc5288 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.916038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7be5ecc-284a-49cd-9ec3-697890fdfb77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.930618] env[68244]: DEBUG nova.compute.provider_tree [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.084746] env[68244]: DEBUG nova.network.neutron [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Updated VIF entry in instance network info cache for port a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.085181] env[68244]: DEBUG nova.network.neutron [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Updating instance_info_cache with network_info: [{"id": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "address": "fa:16:3e:c6:34:d3", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b7e2c4-a3", "ovs_interfaceid": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.117648] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780168, 'name': CreateVM_Task, 'duration_secs': 0.927703} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.117938] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.118550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.118717] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.119068] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.121139] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb30d4ad-8f54-4288-829b-764c3465fde6 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.126350] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 761.126350] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528f941a-fd5b-2c4e-fd81-83c2aac57603" [ 761.126350] env[68244]: _type = "Task" [ 761.126350] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.134434] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528f941a-fd5b-2c4e-fd81-83c2aac57603, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.258118] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Successfully updated port: e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.436109] env[68244]: DEBUG nova.scheduler.client.report [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.440416] env[68244]: DEBUG nova.network.neutron [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [{"id": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "address": "fa:16:3e:78:0b:4f", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bc0d0f1-ef", "ovs_interfaceid": "4bc0d0f1-ef11-425c-987c-514c9b55015f", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.587840] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] Releasing lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.588121] env[68244]: DEBUG nova.compute.manager [req-c8852d63-a2b2-48aa-b181-9a55f3717910 req-a6a454c1-be5f-4b21-bf3d-2e3ebedaba8f service nova] [instance: bbc08614-926e-4209-abec-4808f223943a] Received event network-vif-deleted-01eeb7b1-e851-425f-933e-fdd80ac8564b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.636602] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528f941a-fd5b-2c4e-fd81-83c2aac57603, 'name': SearchDatastore_Task, 'duration_secs': 0.01033} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.636926] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.637242] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.637499] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.637668] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.637920] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.638130] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-bcb529fa-ca7b-440f-9c2b-035fcfe7e418 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.649194] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.649945] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.650721] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e284b283-6e93-44b8-b6f5-4977489154ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.655919] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 761.655919] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218a5e0-5e13-3f5e-15ba-31d684614aef" [ 761.655919] env[68244]: _type = "Task" [ 761.655919] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.663776] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218a5e0-5e13-3f5e-15ba-31d684614aef, 'name': SearchDatastore_Task} progress is 0%. 
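The MakeDirectory call followed by "Created directory with path [datastore2] devstack-image-cache_base" and "Folder ... created" above is an idempotent create-if-missing step: the driver asks vCenter for the image-cache folder and treats an already-existing directory as success. A minimal sketch of that pattern; the make_dir callable and the DirectoryExists exception are illustrative stand-ins, not Nova's or oslo.vmware's actual names.

# Idempotent "create the image-cache folder if missing" step, as suggested by
# the MakeDirectory / "Folder ... created" lines above. DirectoryExists and
# make_dir are illustrative stand-ins, not real oslo.vmware names.
class DirectoryExists(Exception):
    pass

def ensure_cache_dir(make_dir, datastore, path="devstack-image-cache_base"):
    full_path = "[%s] %s" % (datastore, path)
    try:
        make_dir(full_path)          # FileManager.MakeDirectory under the hood
    except DirectoryExists:
        pass                         # another request created it first; that is fine
    return full_path
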
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.758118] env[68244]: DEBUG nova.compute.manager [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Received event network-vif-plugged-e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.758118] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Acquiring lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.758118] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.758118] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.758118] env[68244]: DEBUG nova.compute.manager [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] No waiting events found dispatching network-vif-plugged-e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.758458] env[68244]: WARNING nova.compute.manager [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Received unexpected event network-vif-plugged-e6164ba3-acf1-412b-b790-6713822c1144 for instance with vm_state building and task_state spawning. [ 761.758458] env[68244]: DEBUG nova.compute.manager [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Received event network-changed-e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.758517] env[68244]: DEBUG nova.compute.manager [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Refreshing instance network info cache due to event network-changed-e6164ba3-acf1-412b-b790-6713822c1144. 
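The pop_instance_event locking and the "No waiting events found dispatching network-vif-plugged-..." warning above come from the external-event plumbing: the build path registers an expectation for an event such as network-vif-plugged, and the later Neutron notification pops and signals it; if nothing registered the expectation, the event is logged as unexpected. A much-simplified sketch of that shape (not Nova's InstanceEvents class) using a dict of threading.Event objects:

import threading

# Simplified event registry in the spirit of the pop_instance_event lines above.
class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}                      # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def pop_and_signal(self, instance_uuid, event_name):
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            # Nobody is waiting: log and drop, like the WARNING above.
            return False
        ev.set()
        return True

The spawn path would call prepare() before plugging the VIF and then wait() on the returned event; the handler for the incoming Neutron event calls pop_and_signal().
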
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 761.758693] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Acquiring lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.758821] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Acquired lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.758981] env[68244]: DEBUG nova.network.neutron [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Refreshing network info cache for port e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.765014] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.942760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.945875] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.109s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.946829] env[68244]: INFO nova.compute.claims [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.949706] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-10957648-8618-4f2c-8b08-5468bca20cfc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.950632] env[68244]: DEBUG nova.objects.instance [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lazy-loading 'migration_context' on Instance uuid 10957648-8618-4f2c-8b08-5468bca20cfc {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.975178] env[68244]: INFO nova.scheduler.client.report [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 
tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted allocations for instance cb607c5e-797d-4e52-9ba4-66113718dacc [ 762.045934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.046440] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.046764] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.046998] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.047194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.052864] env[68244]: INFO nova.compute.manager [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Terminating instance [ 762.167016] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218a5e0-5e13-3f5e-15ba-31d684614aef, 'name': SearchDatastore_Task, 'duration_secs': 0.027765} completed successfully. 
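The terminate path above first serializes on the per-instance lock ("b0090ea8-...") and then briefly holds "<uuid>-events" while clearing any queued external events before logging "Terminating instance". With oslo.concurrency that nesting is just two context-managed locks; a minimal sketch, with the clear/shutdown callables standing in for the real manager code:

from oslo_concurrency import lockutils

# Shape of the lock nesting seen in do_terminate_instance above; the two
# helper callables are placeholders for the real clear-events/shutdown logic.
def do_terminate_instance(instance_uuid, clear_events, shutdown_instance):
    with lockutils.lock(instance_uuid):                  # serialize work on this instance
        with lockutils.lock("%s-events" % instance_uuid):
            clear_events(instance_uuid)                  # drop queued external events
        shutdown_instance(instance_uuid)                 # power off, unregister, delete files
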
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.167855] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53b8fc84-a656-4b72-80aa-4ceed50cfacd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.173191] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 762.173191] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522b6cb9-8c73-2cb4-bb4c-1194a81ab11f" [ 762.173191] env[68244]: _type = "Task" [ 762.173191] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.181131] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522b6cb9-8c73-2cb4-bb4c-1194a81ab11f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.234585] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.234924] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.235146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.235322] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.235509] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.238853] env[68244]: INFO nova.compute.manager [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Terminating instance [ 762.291884] env[68244]: DEBUG nova.network.neutron [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.392282] env[68244]: DEBUG nova.network.neutron [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.454514] env[68244]: DEBUG nova.objects.base [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Object Instance<10957648-8618-4f2c-8b08-5468bca20cfc> lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 762.455659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a817f7-8a1f-43ee-bf6e-c2939ea9085c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.476157] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b5d3957-9adc-454e-b064-cfb608f852b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.484519] env[68244]: DEBUG oslo_vmware.api [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 762.484519] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e318b-e371-9e17-5fe4-82c2758e31f8" [ 762.484519] env[68244]: _type = "Task" [ 762.484519] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.485054] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d81b4e0-410f-4920-aa4b-33f4cfaa2861 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "cb607c5e-797d-4e52-9ba4-66113718dacc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.248s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.494978] env[68244]: DEBUG oslo_vmware.api [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525e318b-e371-9e17-5fe4-82c2758e31f8, 'name': SearchDatastore_Task, 'duration_secs': 0.007709} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.495280] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.557724] env[68244]: DEBUG nova.compute.manager [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 762.557724] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.558552] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c99370-e8d0-4040-a9f8-b1adfed2a0a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.566678] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.567211] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6d6aabe-c5a4-45fd-a06f-6a00aa76d0fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.573768] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 762.573768] env[68244]: value = "task-2780169" [ 762.573768] env[68244]: _type = "Task" [ 762.573768] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.581640] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780169, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.684192] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522b6cb9-8c73-2cb4-bb4c-1194a81ab11f, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. 
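Almost every vCenter operation above follows the same rhythm: invoke a *_Task method, then poll it ("progress is 0%" ... "completed successfully") until it reaches a terminal state. With oslo.vmware that polling is delegated to the session's wait_for_task(); a hedged sketch using the PowerOffVM_Task step from the log, assuming a session shaped like oslo_vmware.api.VMwareAPISession:

# Invoke-then-poll pattern behind the "Waiting for the task ... completed
# successfully" lines; `session` is assumed to behave like
# oslo_vmware.api.VMwareAPISession (invoke_api + wait_for_task).
def power_off_vm(session, vm_ref):
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task polls the task and raises if it ends in an error state,
    # so callers only ever see the successful task info here.
    return session.wait_for_task(task)
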
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.684476] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.684746] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/7778c027-d4af-436c-a545-aa513c0b1127.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.685015] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca0c0db6-da28-4bb4-bd95-625e73168104 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.692984] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 762.692984] env[68244]: value = "task-2780170" [ 762.692984] env[68244]: _type = "Task" [ 762.692984] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.701588] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780170, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.743366] env[68244]: DEBUG nova.compute.manager [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Start destroying the instance on the hypervisor. 
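The "Copying Virtual Disk [datastore2] devstack-image-cache_base/<image>/<image>.vmdk to [datastore2] <instance>/<instance>.vmdk" line above is the cache-hit half of spawn: the base image is already in the per-datastore cache, so only a copy into the instance folder is needed before the disk is resized. A small sketch that composes those two paths from the naming scheme visible in the log (plain strings standing in for oslo.vmware's datastore path helper):

# Compose the source/destination paths used by the CopyVirtualDisk_Task call
# above; the "[datastore] folder/file.vmdk" format is taken from the log.
def cached_image_vmdk(datastore, image_id, cache_dir="devstack-image-cache_base"):
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_dir, image_id, image_id)

def instance_vmdk(datastore, instance_uuid):
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_vmdk("datastore2", "9aa0b4d1-af1b-4141-9ca6-95525b722d7e")
dst = instance_vmdk("datastore2", "7778c027-d4af-436c-a545-aa513c0b1127")
# src and dst now match the two paths in the "Copying Virtual Disk ..." line.
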
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 762.743366] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.744020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c07896f-67f5-444c-b0a4-0cead7106e13 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.752232] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.752491] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dd3af2c-9ab4-4bb4-9b8b-135650f7d07e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.818489] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.818747] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.818954] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleting the datastore file [datastore2] 09ab8712-0f7a-4122-9d61-19da3e65d22b {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.819266] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d38a428-121d-46d0-a80b-191d717fa0e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.826937] env[68244]: DEBUG oslo_vmware.api [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 762.826937] env[68244]: value = "task-2780172" [ 762.826937] env[68244]: _type = "Task" [ 762.826937] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.834842] env[68244]: DEBUG oslo_vmware.api [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
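The teardown of 09ab8712-... above runs a fixed sequence: unregister the VM from vCenter, delete the instance's directory from the datastore, then hand its ports back to Neutron. A condensed sketch of that ordering; the steps and their order come from the log, the callables are placeholders:

# Ordering of the destroy path visible above; each callable stands in for the
# driver/manager step named in the corresponding log line.
def destroy_instance(unregister_vm, delete_datastore_dir, deallocate_network,
                     instance_uuid, datastore="datastore2"):
    unregister_vm(instance_uuid)                                   # VirtualMachine.UnregisterVM
    delete_datastore_dir("[%s] %s" % (datastore, instance_uuid))   # FileManager.DeleteDatastoreFile_Task
    deallocate_network(instance_uuid)                              # deallocate_for_instance()
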
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.894957] env[68244]: DEBUG oslo_concurrency.lockutils [req-d55920ba-2992-4f9b-838c-e3499bf58dca req-c959fb91-7c9f-422d-b89d-467f60be55e9 service nova] Releasing lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.895421] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.895591] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.086037] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780169, 'name': PowerOffVM_Task, 'duration_secs': 0.228698} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.088293] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 763.088490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 763.088999] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48117ede-ab5e-4c04-bda9-fdd0f36d2390 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.150667] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 763.151211] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 763.151617] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Deleting 
the datastore file [datastore2] b0090ea8-98fe-42a0-97cc-40d7578851a9 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 763.151676] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed34ad52-e8f2-4a76-8fd1-fdd60f950a2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.166016] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for the task: (returnval){ [ 763.166016] env[68244]: value = "task-2780174" [ 763.166016] env[68244]: _type = "Task" [ 763.166016] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.172894] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.204662] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780170, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467842} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.204951] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/7778c027-d4af-436c-a545-aa513c0b1127.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 763.205194] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.205442] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d46937d-0f92-44a5-932b-429972498741 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.213462] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 763.213462] env[68244]: value = "task-2780175" [ 763.213462] env[68244]: _type = "Task" [ 763.213462] env[68244]: } to complete. 
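"Extending root virtual disk to 1048576" above is a size in KB: a 1 GB root disk becomes 1 * 1024 * 1024 = 1,048,576 KB before being handed to ExtendVirtualDisk_Task, whose capacity argument is expressed in KB (the exact conversion site in the driver is an assumption here; the arithmetic is not):

root_gb = 1
root_kb = root_gb * 1024 * 1024   # ExtendVirtualDisk capacities are in KB
assert root_kb == 1048576         # matches "Extending root virtual disk to 1048576"
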
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.222423] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780175, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.338529] env[68244]: DEBUG oslo_vmware.api [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420147} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.338529] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.338529] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.338646] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.338734] env[68244]: INFO nova.compute.manager [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Took 0.60 seconds to destroy the instance on the hypervisor. [ 763.338969] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.339169] env[68244]: DEBUG nova.compute.manager [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.339257] env[68244]: DEBUG nova.network.neutron [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.470539] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.566224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efefb67-b5ba-458b-bcfd-6528320a6d73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.574339] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6d8088-7e5d-4a3b-90ff-5ef9178d62bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.616631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96045cb0-f084-4e88-bb81-990209ed2087 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.625416] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5eedfe2-c98f-4984-bbbc-b8a4e2bc5a2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.640837] env[68244]: DEBUG nova.compute.provider_tree [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.672803] env[68244]: DEBUG oslo_vmware.api [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Task: {'id': task-2780174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155118} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.673560] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.673980] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.674273] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.674473] env[68244]: INFO nova.compute.manager [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Took 1.12 seconds to destroy the instance on the hypervisor. 
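The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" lines around this point are oslo.service's looping-call machinery wrapping the Neutron deallocation so transient failures can be retried. A minimal example of driving a callable with FixedIntervalLoopingCall until it signals completion; the retry policy here is illustrative, not Nova's:

from oslo_service import loopingcall

def deallocate_with_retries(deallocate, max_attempts=3):
    attempts = {"count": 0}

    def _try():
        attempts["count"] += 1
        try:
            deallocate()
        except Exception:
            if attempts["count"] >= max_attempts:
                raise                            # give up and propagate the last error
            return                               # let the looping call fire again
        raise loopingcall.LoopingCallDone()      # success: stop the loop

    # start() returns an event-like object; wait() blocks until the loop finishes.
    return loopingcall.FixedIntervalLoopingCall(_try).start(interval=1).wait()
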
[ 763.674807] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.675018] env[68244]: DEBUG nova.compute.manager [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.675114] env[68244]: DEBUG nova.network.neutron [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.725039] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780175, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08242} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.725262] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 763.726069] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca7eaf2-5f44-488c-8d3c-a4a7f88d7f92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.751590] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/7778c027-d4af-436c-a545-aa513c0b1127.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.753946] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee57dee8-6a7d-4e16-a731-f6dc0ee0c3dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.779140] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 763.779140] env[68244]: value = "task-2780176" [ 763.779140] env[68244]: _type = "Task" [ 763.779140] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.786959] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780176, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.799123] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Updating instance_info_cache with network_info: [{"id": "e6164ba3-acf1-412b-b790-6713822c1144", "address": "fa:16:3e:c7:cd:23", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6164ba3-ac", "ovs_interfaceid": "e6164ba3-acf1-412b-b790-6713822c1144", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.838331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "184f7694-9cab-4184-a1c0-926763a81baf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.838885] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "184f7694-9cab-4184-a1c0-926763a81baf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.876613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.877093] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.883690] env[68244]: DEBUG nova.compute.manager [req-ce2d0bec-71d9-40f2-b012-c9b8b5e96259 req-e191e40c-8831-4fb1-be3a-cc0b5bd90beb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Received event network-vif-deleted-1831bead-f9d8-4019-b2a9-1d401a809acf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 763.883970] env[68244]: INFO nova.compute.manager [req-ce2d0bec-71d9-40f2-b012-c9b8b5e96259 req-e191e40c-8831-4fb1-be3a-cc0b5bd90beb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Neutron deleted interface 1831bead-f9d8-4019-b2a9-1d401a809acf; detaching it from the instance and deleting it from the info cache [ 763.884124] env[68244]: DEBUG nova.network.neutron [req-ce2d0bec-71d9-40f2-b012-c9b8b5e96259 req-e191e40c-8831-4fb1-be3a-cc0b5bd90beb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.965695] env[68244]: DEBUG nova.compute.manager [req-2208dc45-478e-4c90-b1a8-2b0ebd1b31ba req-69a32b84-9bd4-4ccf-93b6-6bb7f2b3ef7f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Received event network-vif-deleted-df771f98-b8ac-43c4-8f5b-d09501711a88 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 763.965785] env[68244]: INFO nova.compute.manager [req-2208dc45-478e-4c90-b1a8-2b0ebd1b31ba req-69a32b84-9bd4-4ccf-93b6-6bb7f2b3ef7f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Neutron deleted interface df771f98-b8ac-43c4-8f5b-d09501711a88; detaching it from the instance and deleting it from the info cache [ 763.965967] env[68244]: DEBUG nova.network.neutron [req-2208dc45-478e-4c90-b1a8-2b0ebd1b31ba req-69a32b84-9bd4-4ccf-93b6-6bb7f2b3ef7f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.144718] env[68244]: DEBUG nova.scheduler.client.report [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.289095] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780176, 'name': ReconfigVM_Task} progress is 99%. 
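The inventory the resource tracker keeps reporting for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 is what bounds scheduling onto this node: usable capacity per resource class is (total - reserved) * allocation_ratio. Plugging in the values from the log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
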
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.302244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "refresh_cache-6abb889a-2e96-4aba-8e36-c4c8997dd4e2" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.302510] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance network_info: |[{"id": "e6164ba3-acf1-412b-b790-6713822c1144", "address": "fa:16:3e:c7:cd:23", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6164ba3-ac", "ovs_interfaceid": "e6164ba3-acf1-412b-b790-6713822c1144", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 764.302932] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:cd:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6164ba3-acf1-412b-b790-6713822c1144', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.310508] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Creating folder: Project (de3029d574fa4130b5fbbf34d2e39668). Parent ref: group-v558876. 
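The "Instance VIF info [...]" line above shows how a cached Neutron port entry is reduced to what the vmwareapi driver actually needs: the bridge as the network name, the port's MAC, the NSX logical switch id from the binding details, the port id, and a vmxnet3 adapter. A sketch of that mapping using only field names present in the logged structures:

# Reduce a cached network_info entry (shape as logged above) to the VIF info
# dict shown in the "Instance VIF info" line; all keys come from the log.
def vif_info_from_port(port):
    return {
        'network_name': port['network']['bridge'],                  # "br-int"
        'mac_address': port['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': port['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': port['id'],
        'vif_model': 'vmxnet3',
    }
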
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.310806] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bfdc1fe-eb8b-407c-85d3-05952b637600 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.321702] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Created folder: Project (de3029d574fa4130b5fbbf34d2e39668) in parent group-v558876. [ 764.321897] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Creating folder: Instances. Parent ref: group-v558974. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.322144] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-633fa34a-b522-4713-965c-0c0b8882a6ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.332030] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Created folder: Instances in parent group-v558974. [ 764.332286] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.332482] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.332690] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2e058db-8ef1-43fc-9ab8-ab8d4b13c797 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.353666] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.353666] env[68244]: value = "task-2780179" [ 764.353666] env[68244]: _type = "Task" [ 764.353666] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.356988] env[68244]: DEBUG nova.network.neutron [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.365797] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.386245] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81e2242f-c485-470e-9bb0-714bd4471633 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.396360] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4331a5e-08af-4cf4-b7b9-f7668f97344f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.427055] env[68244]: DEBUG nova.compute.manager [req-ce2d0bec-71d9-40f2-b012-c9b8b5e96259 req-e191e40c-8831-4fb1-be3a-cc0b5bd90beb service nova] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Detach interface failed, port_id=1831bead-f9d8-4019-b2a9-1d401a809acf, reason: Instance 09ab8712-0f7a-4122-9d61-19da3e65d22b could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 764.450184] env[68244]: DEBUG nova.network.neutron [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.471370] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f9e5393-bfbd-466d-a26d-4ca45386cf97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.480630] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f19e8d-f189-41a7-a7c6-b2d044b58326 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.514361] env[68244]: DEBUG nova.compute.manager [req-2208dc45-478e-4c90-b1a8-2b0ebd1b31ba req-69a32b84-9bd4-4ccf-93b6-6bb7f2b3ef7f service nova] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Detach interface failed, port_id=df771f98-b8ac-43c4-8f5b-d09501711a88, reason: Instance b0090ea8-98fe-42a0-97cc-40d7578851a9 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 764.649573] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.650154] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 764.652862] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.611s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.654237] env[68244]: INFO nova.compute.claims [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.790035] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780176, 'name': ReconfigVM_Task, 'duration_secs': 0.56372} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.790293] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/7778c027-d4af-436c-a545-aa513c0b1127.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.790909] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11a2a1ef-f931-47dc-b2b6-b62b2f91d836 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.798101] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 764.798101] env[68244]: value = "task-2780180" [ 764.798101] env[68244]: _type = "Task" [ 764.798101] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.804991] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780180, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.859134] env[68244]: INFO nova.compute.manager [-] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Took 1.52 seconds to deallocate network for instance. [ 764.864012] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780179, 'name': CreateVM_Task, 'duration_secs': 0.344062} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.866828] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.867480] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.867655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.868017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 764.868463] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b3ff7d2-c0cf-4811-8d38-c22fc2363684 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.874110] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 764.874110] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522d593e-8199-57ae-a942-b1514fbd84cc" [ 764.874110] env[68244]: _type = "Task" [ 764.874110] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.882400] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522d593e-8199-57ae-a942-b1514fbd84cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.952690] env[68244]: INFO nova.compute.manager [-] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Took 1.28 seconds to deallocate network for instance. 
[ 765.158495] env[68244]: DEBUG nova.compute.utils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.161953] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 765.161953] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.200382] env[68244]: DEBUG nova.policy [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fd06cb45af94bb88bcf0e4399fe5265', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de3029d574fa4130b5fbbf34d2e39668', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 765.307796] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780180, 'name': Rename_Task, 'duration_secs': 0.142171} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.308095] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.308346] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a66cec4c-456a-49be-8fa3-0f2c9f46211e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.315348] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 765.315348] env[68244]: value = "task-2780181" [ 765.315348] env[68244]: _type = "Task" [ 765.315348] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.321828] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780181, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.372692] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.384205] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522d593e-8199-57ae-a942-b1514fbd84cc, 'name': SearchDatastore_Task, 'duration_secs': 0.009379} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.384522] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.384861] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.385122] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.385272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.385450] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.385741] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-027fa348-416b-4c3b-beb4-0c4e761e215d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.394628] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.394832] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.395652] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f1eaeb2-775c-4a05-b3eb-f20885fb5375 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.401136] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 765.401136] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a5aee-5325-2d91-bf1e-f1fc167a495d" [ 765.401136] env[68244]: _type = "Task" [ 765.401136] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.408744] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a5aee-5325-2d91-bf1e-f1fc167a495d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.459748] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.475469] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Successfully created port: 060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.662823] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.825836] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.912608] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a5aee-5325-2d91-bf1e-f1fc167a495d, 'name': SearchDatastore_Task, 'duration_secs': 0.008171} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.915941] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-396ab1af-12e3-4cfd-806b-ee972237c97e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.922507] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 765.922507] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d17c45-43ae-35ec-9f98-f39e8a550553" [ 765.922507] env[68244]: _type = "Task" [ 765.922507] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.933386] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d17c45-43ae-35ec-9f98-f39e8a550553, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.128500] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7882305-d9f1-4705-ab61-9876ce13f33a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.135933] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0e0efc-7fb2-4f97-a213-17c6e46db841 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.165793] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ee1f74-36ec-449f-be4d-3e898fc7da2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.177053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5128546a-2744-4ea8-8bfc-e3351f800c97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.189506] env[68244]: DEBUG nova.compute.provider_tree [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.326439] env[68244]: DEBUG oslo_vmware.api [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780181, 'name': PowerOnVM_Task, 'duration_secs': 0.661809} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.326703] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 766.326914] env[68244]: INFO nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Took 9.27 seconds to spawn the instance on the hypervisor. 
[ 766.327155] env[68244]: DEBUG nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 766.327939] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8cdc8f-5d19-44fe-8f63-c61e85d98843 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.434416] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d17c45-43ae-35ec-9f98-f39e8a550553, 'name': SearchDatastore_Task, 'duration_secs': 0.011306} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.434705] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.434974] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 6abb889a-2e96-4aba-8e36-c4c8997dd4e2/6abb889a-2e96-4aba-8e36-c4c8997dd4e2.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.435522] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f832d38-507b-467f-b3c3-e6a640f8f043 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.442652] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 766.442652] env[68244]: value = "task-2780182" [ 766.442652] env[68244]: _type = "Task" [ 766.442652] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.450576] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.673932] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 766.692301] env[68244]: DEBUG nova.scheduler.client.report [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 766.705675] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 766.705756] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 766.705880] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 766.706070] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 766.706217] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 766.706361] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 766.706570] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 766.706750] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 766.706923] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 766.707109] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 766.707284] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 766.708236] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22356c64-37b3-4aac-9ab5-73b6245bcc4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.717034] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25d740-8828-4fa9-bd48-857923241a30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.849964] env[68244]: INFO nova.compute.manager [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Took 43.85 seconds to build instance. 
[ 766.886146] env[68244]: DEBUG nova.compute.manager [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Received event network-vif-plugged-060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 766.886366] env[68244]: DEBUG oslo_concurrency.lockutils [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] Acquiring lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.886568] env[68244]: DEBUG oslo_concurrency.lockutils [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.886761] env[68244]: DEBUG oslo_concurrency.lockutils [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.886930] env[68244]: DEBUG nova.compute.manager [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] No waiting events found dispatching network-vif-plugged-060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.887208] env[68244]: WARNING nova.compute.manager [req-60fa42a7-7e1e-46e8-b8d4-2ee555cab110 req-1575bb01-f1bd-492b-8ffe-60d7194faa5c service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Received unexpected event network-vif-plugged-060a13a5-3b77-45b8-9522-05b2eb9e0e12 for instance with vm_state building and task_state spawning. [ 766.952780] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780182, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459949} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.953056] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 6abb889a-2e96-4aba-8e36-c4c8997dd4e2/6abb889a-2e96-4aba-8e36-c4c8997dd4e2.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.953270] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.953520] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d8626f3-9f81-4708-9681-8227f5b735ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.959308] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 766.959308] env[68244]: value = "task-2780183" [ 766.959308] env[68244]: _type = "Task" [ 766.959308] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.968275] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780183, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.985397] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Successfully updated port: 060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.203268] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.203841] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 767.209678] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.691s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.209995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.210170] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 767.210674] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.016s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.212220] env[68244]: INFO nova.compute.claims [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.215829] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a389f4c5-9cf5-4c81-b002-b786adba1e39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.228022] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d405846-f706-4183-b9cc-c407276f007d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.241255] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8452cdf8-9f8e-4583-bb0a-fea51f5c8a67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.248422] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbcbf59-e05a-4454-a7f0-f6b462942480 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.282766] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179006MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 767.282912] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.353177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cb8ca0b6-4dd0-4117-91d1-66a82c093380 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.349s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.469806] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780183, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06523} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.470891] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.471782] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5d7cc9-25af-47a5-a744-fa55faf4d03f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.493163] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 6abb889a-2e96-4aba-8e36-c4c8997dd4e2/6abb889a-2e96-4aba-8e36-c4c8997dd4e2.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.493809] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.493987] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.494189] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.495490] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-6f38fb45-8288-4928-8a67-0912db8c199d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.519041] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 767.519041] env[68244]: value = "task-2780184" [ 767.519041] env[68244]: _type = "Task" [ 767.519041] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.529609] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780184, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.539182] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.665021] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Updating instance_info_cache with network_info: [{"id": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "address": "fa:16:3e:75:3a:98", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap060a13a5-3b", "ovs_interfaceid": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.710771] env[68244]: DEBUG nova.compute.utils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 767.712284] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Allocating 
IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 767.712375] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.749324] env[68244]: DEBUG nova.policy [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fd06cb45af94bb88bcf0e4399fe5265', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de3029d574fa4130b5fbbf34d2e39668', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 767.788703] env[68244]: INFO nova.compute.manager [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Rescuing [ 767.789070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.789317] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.789532] env[68244]: DEBUG nova.network.neutron [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.857532] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 767.980919] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Successfully created port: 61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.029817] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780184, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.167308] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.167636] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Instance network_info: |[{"id": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "address": "fa:16:3e:75:3a:98", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap060a13a5-3b", "ovs_interfaceid": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 768.168113] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:3a:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '060a13a5-3b77-45b8-9522-05b2eb9e0e12', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.175613] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.175853] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 768.176169] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e90e410a-b828-4afb-abae-86af12ae950c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.196038] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 768.196038] env[68244]: value = "task-2780185" [ 768.196038] env[68244]: _type = "Task" [ 768.196038] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.212730] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780185, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.215475] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 768.378719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.531604] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780184, 'name': ReconfigVM_Task, 'duration_secs': 0.975304} completed successfully. 
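[editor's note] The CreateVM_Task and ReconfigVM_Task entries above follow oslo.vmware's start-then-poll pattern: invoke_api() fires the SOAP call and returns a task reference, and wait_for_task() polls it until completion, which is what emits the "Waiting for the task ... to complete" and "progress is N%" lines. A minimal sketch of that pattern, assuming an already established VMwareAPISession; vm_ref and spec are placeholders, not values taken from this log:

```python
from oslo_vmware import api


def reconfigure_and_wait(session: api.VMwareAPISession, vm_ref, spec):
    # Fire the vCenter call (ReconfigVM_Task here) and get back a task
    # managed-object reference without blocking.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    # Poll the task until it reaches a terminal state; this is what produces
    # the periodic "progress is N%" entries, and it raises on task failure.
    return session.wait_for_task(task)
```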
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.531724] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 6abb889a-2e96-4aba-8e36-c4c8997dd4e2/6abb889a-2e96-4aba-8e36-c4c8997dd4e2.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.532362] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ea234a3-84e2-4b3f-b52e-96cfc391bbe0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.538408] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 768.538408] env[68244]: value = "task-2780186" [ 768.538408] env[68244]: _type = "Task" [ 768.538408] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.539305] env[68244]: DEBUG nova.network.neutron [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Updating instance_info_cache with network_info: [{"id": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "address": "fa:16:3e:c6:34:d3", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b7e2c4-a3", "ovs_interfaceid": "a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.556036] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780186, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.711367] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780185, 'name': CreateVM_Task, 'duration_secs': 0.303606} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.711514] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 768.712216] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.712383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.712697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 768.712978] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fac81dd5-d4e6-4d60-92aa-ece96bfc945e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.717564] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 768.717564] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5247c52b-fc7a-8357-b59d-2273026f792c" [ 768.717564] env[68244]: _type = "Task" [ 768.717564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.732367] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5247c52b-fc7a-8357-b59d-2273026f792c, 'name': SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.732901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.733448] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.733448] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.733448] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.733555] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.733775] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-933cb877-1c82-474e-b27a-597559a1ef8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.741863] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.742084] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Folder [datastore2] devstack-image-cache_base created. 
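[editor's note] The Acquiring/Acquired/Releasing lock lines around the [datastore2] devstack-image-cache_base path come from oslo.concurrency's lockutils. A rough sketch of that guard pattern follows; it is illustrative only: the real presence check is a SearchDatastore_Task against the datastore, not a local-file test, and fetch_image_if_missing/download_image are made-up names for this example.

```python
import os

from oslo_concurrency import lockutils


def fetch_image_if_missing(cache_vmdk_path, download_image):
    # lockutils.lock() is a context manager; with the default external=False
    # it serializes greenthreads within this process only, matching the
    # "Acquiring"/"Acquired"/"Releasing" DEBUG lines above.
    with lockutils.lock(cache_vmdk_path):
        # Illustrative check only: the driver searches the datastore rather
        # than the local filesystem.
        if not os.path.exists(cache_vmdk_path):
            download_image(cache_vmdk_path)
    return cache_vmdk_path
```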
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.742828] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f92d3f8-49dc-4921-bbb6-7105b4708569 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.747856] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 768.747856] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aafffa-15c6-7533-3f58-e27f85475700" [ 768.747856] env[68244]: _type = "Task" [ 768.747856] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.752774] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c25adef-fe36-485d-92ca-e04fb19cc0bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.761835] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aafffa-15c6-7533-3f58-e27f85475700, 'name': SearchDatastore_Task, 'duration_secs': 0.008633} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.764216] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de372388-ccf3-4728-b5f3-05e4696a8334 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.767203] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8f4b3a-bef3-4328-948c-3b30a08d1337 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.773347] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 768.773347] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52013063-9c67-c57b-7842-07aa6e457213" [ 768.773347] env[68244]: _type = "Task" [ 768.773347] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.802333] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba72ff0d-8a19-429c-82eb-4954397b9e64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.810032] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52013063-9c67-c57b-7842-07aa6e457213, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.812106] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.812364] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 086dda59-4bd2-4ca2-a758-c120f1271f42/086dda59-4bd2-4ca2-a758-c120f1271f42.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.812642] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f81348c-2df4-4fb0-9063-873a7b2f0ab1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.815395] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cecafab-8bd5-4dca-b4fd-72dc4e190050 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.829464] env[68244]: DEBUG nova.compute.provider_tree [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.831938] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 768.831938] env[68244]: value = "task-2780187" [ 768.831938] env[68244]: _type = "Task" [ 768.831938] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.840449] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780187, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.051719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-7778c027-d4af-436c-a545-aa513c0b1127" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.054216] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780186, 'name': Rename_Task, 'duration_secs': 0.176831} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.054806] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 769.055101] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5de97151-9391-4b47-a7c5-90fbe1c267a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.062979] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 769.062979] env[68244]: value = "task-2780188" [ 769.062979] env[68244]: _type = "Task" [ 769.062979] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.072747] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.229310] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 769.237928] env[68244]: DEBUG nova.compute.manager [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Received event network-changed-060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 769.237928] env[68244]: DEBUG nova.compute.manager [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Refreshing instance network info cache due to event network-changed-060a13a5-3b77-45b8-9522-05b2eb9e0e12. 
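[editor's note] External events such as network-changed-060a13a5-... arrive from Neutron with the port ID folded into the event name as a tag. The helper below is purely illustrative (Nova receives these as structured event objects, not raw strings); it only shows how an event string of that shape splits into a name and a port tag:

```python
KNOWN_EVENTS = (
    'network-vif-plugged',
    'network-vif-unplugged',
    'network-vif-deleted',
    'network-changed',
)


def split_event(event):
    # 'network-changed-060a13a5-...' -> ('network-changed', '060a13a5-...')
    for name in KNOWN_EVENTS:
        if event == name:
            return name, None
        if event.startswith(name + '-'):
            return name, event[len(name) + 1:]
    raise ValueError('unknown external event: %s' % event)
```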
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 769.237928] env[68244]: DEBUG oslo_concurrency.lockutils [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] Acquiring lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.237928] env[68244]: DEBUG oslo_concurrency.lockutils [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] Acquired lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.237928] env[68244]: DEBUG nova.network.neutron [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Refreshing network info cache for port 060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.258506] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 769.259275] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.259482] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.259685] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.259838] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.259986] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 
tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 769.260230] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 769.260391] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 769.260558] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 769.260722] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 769.260914] env[68244]: DEBUG nova.virt.hardware [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 769.262457] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e054ce-7dfc-4595-a4ea-79a903365564 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.272078] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a60482-933d-481e-b8f2-fbf29ea2d6ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.334058] env[68244]: DEBUG nova.scheduler.client.report [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.347630] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 
tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462521} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.347964] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 086dda59-4bd2-4ca2-a758-c120f1271f42/086dda59-4bd2-4ca2-a758-c120f1271f42.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.348204] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.348629] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc891299-c912-4708-b11a-8ec90772aefd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.355027] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 769.355027] env[68244]: value = "task-2780189" [ 769.355027] env[68244]: _type = "Task" [ 769.355027] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.363398] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780189, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.480239] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Successfully updated port: 61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.573897] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780188, 'name': PowerOnVM_Task} progress is 66%. 
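[editor's note] The nova.virt.hardware lines above enumerate CPU topologies for the 1-vCPU m1.nano flavor under the default 65536 socket/core/thread limits. A simplified, self-contained sketch of that enumeration; the real code additionally honours flavor and image preferences and sorts the candidates:

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Keep every (sockets, cores, threads) factorisation of the vCPU count
    # that stays within the limits.
    found = []
    for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                     range(1, min(vcpus, max_cores) + 1),
                                     range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(VirtCPUTopology(s, c, t))
    return found


# For vcpus=1 this yields exactly one candidate, matching the
# "Got 1 possible topologies" line above.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```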
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.842401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.842915] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 769.845337] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.899s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.845547] env[68244]: DEBUG nova.objects.instance [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lazy-loading 'resources' on Instance uuid fd4d5494-042b-457e-a826-dee4d87c0032 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 769.866287] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062849} completed successfully. 
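[editor's note] The inventory report for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 implies the capacity Placement schedules against: (total - reserved) * allocation_ratio per resource class. A short worked example using the figures from the log:

```python
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```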
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.866832] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.867315] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f253a870-242d-4edd-9ef7-ba40b976fb7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.891173] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 086dda59-4bd2-4ca2-a758-c120f1271f42/086dda59-4bd2-4ca2-a758-c120f1271f42.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.891870] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3934e7f-3de7-424a-b097-91ef1c53a31f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.914395] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 769.914395] env[68244]: value = "task-2780190" [ 769.914395] env[68244]: _type = "Task" [ 769.914395] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.922233] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.939588] env[68244]: DEBUG nova.network.neutron [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Updated VIF entry in instance network info cache for port 060a13a5-3b77-45b8-9522-05b2eb9e0e12. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.939995] env[68244]: DEBUG nova.network.neutron [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Updating instance_info_cache with network_info: [{"id": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "address": "fa:16:3e:75:3a:98", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap060a13a5-3b", "ovs_interfaceid": "060a13a5-3b77-45b8-9522-05b2eb9e0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.983374] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.983523] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.983704] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.075389] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780188, 'name': PowerOnVM_Task} progress is 89%. 
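[editor's note] The instance_info_cache payloads above are lists of VIF dicts whose subnets carry the fixed IPs. A small helper (not Nova's own API, just a reader for the structure shown) that pulls the fixed IPv4 addresses out of such a list:

```python
def fixed_ips(network_info):
    addrs = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                if ip['type'] == 'fixed':
                    addrs.append(ip['address'])
    return addrs

# For the 086dda59-... cache entry above this returns ['192.168.128.7'];
# for the 7778c027-... entry it returns ['192.168.128.14'].
```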
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.349036] env[68244]: DEBUG nova.compute.utils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 770.351042] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 770.351042] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.420121] env[68244]: DEBUG nova.policy [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139b09aa798646eb84621ae2ec19fa42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aee7371df22443b6865a8934ea27c685', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.427969] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780190, 'name': ReconfigVM_Task, 'duration_secs': 0.29108} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.428248] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 086dda59-4bd2-4ca2-a758-c120f1271f42/086dda59-4bd2-4ca2-a758-c120f1271f42.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.430479] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58edede3-b73e-4fa0-ba81-0afcd53dfbff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.437798] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 770.437798] env[68244]: value = "task-2780191" [ 770.437798] env[68244]: _type = "Task" [ 770.437798] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.446135] env[68244]: DEBUG oslo_concurrency.lockutils [req-0a9571bc-abd6-4cd5-853a-67d77ab85d62 req-2c83b8c5-b575-4ad6-9a36-e3f506244dee service nova] Releasing lock "refresh_cache-086dda59-4bd2-4ca2-a758-c120f1271f42" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.446765] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780191, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.535667] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.574830] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780188, 'name': PowerOnVM_Task, 'duration_secs': 1.093742} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.575074] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.575311] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Took 10.85 seconds to spawn the instance on the hypervisor. 
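[editor's note] The "Policy check for network:attach_external_network failed" entries earlier in this section come from oslo.policy evaluating the caller's member/reader roles. A minimal, self-contained illustration of that kind of check; the 'role:admin' rule string is an assumption for the example rather than Nova's exact default, and no policy file is loaded:

```python
from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf(args=[])  # no config files; rely on in-code defaults
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': 'de3029d574fa4130b5fbbf34d2e39668'}
# With do_raise=False the failed check comes back as a boolean, so the caller
# can log it and fall back to non-external networks.
print(enforcer.enforce('network:attach_external_network', {}, creds,
                       do_raise=False))  # False for member/reader credentials
```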
[ 770.575493] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.576327] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51134d0b-db5c-4e7e-a893-1c8c599388aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.590425] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 770.590867] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-201a6c21-656d-48b6-94a7-f68cf6df9d89 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.596557] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 770.596557] env[68244]: value = "task-2780192" [ 770.596557] env[68244]: _type = "Task" [ 770.596557] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.606228] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.713615] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Successfully created port: 1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 770.774860] env[68244]: DEBUG nova.network.neutron [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Updating instance_info_cache with network_info: [{"id": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "address": "fa:16:3e:c7:15:d9", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c4e228-79", "ovs_interfaceid": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.858812] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Start building block device mappings for instance. 
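[editor's note] The "Using /dev/sd instead of None" line from nova.compute.utils.get_next_device_name earlier is device-name selection feeding the block device mappings being built above. A simplified sketch of picking the next free /dev/sdX name; single-letter suffixes only, and the helper name is illustrative:

```python
import string


def next_device_name(used, prefix='/dev/sd'):
    used_letters = {name[len(prefix):] for name in used if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used_letters:
            return prefix + letter
    raise ValueError('no free device names under %s' % prefix)


print(next_device_name(['/dev/sda', '/dev/sdb']))  # /dev/sdc
```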
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 770.896526] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371bf89c-5aa6-4f21-8fed-73381c793d15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.904696] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cc66d3-bdf7-41c7-ba79-7dd776a06874 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.945768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b1c238-574a-417c-ad1a-d2e82a5d2bd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.955568] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780191, 'name': Rename_Task, 'duration_secs': 0.384193} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.957889] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.958606] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9af5905-cbd0-499b-8990-793e35ecb1ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.961333] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e116dd-892c-4340-897a-11c31adf45ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.976196] env[68244]: DEBUG nova.compute.provider_tree [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.979304] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 770.979304] env[68244]: value = "task-2780193" [ 770.979304] env[68244]: _type = "Task" [ 770.979304] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.987190] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780193, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.101308] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Took 45.33 seconds to build instance. [ 771.107397] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780192, 'name': PowerOffVM_Task, 'duration_secs': 0.337142} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.107663] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.108729] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbabdf5-4020-4c75-9345-4aa02b5b75b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.127546] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992f880b-44e3-47a6-93e3-317022a5f372 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.160149] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.160420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4794a145-201d-4d4d-89d9-8fdfa2192311 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.168303] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 771.168303] env[68244]: value = "task-2780194" [ 771.168303] env[68244]: _type = "Task" [ 771.168303] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.176280] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780194, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.278064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.278411] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Instance network_info: |[{"id": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "address": "fa:16:3e:c7:15:d9", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c4e228-79", "ovs_interfaceid": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 771.278842] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:15:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61c4e228-79c7-4531-bcb1-6cf1bed2010e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.287314] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
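[editor's note] The "Instance VIF info" list above is derived from the Neutron network_info shown just before it: port ID, MAC address and the NSX logical-switch ID map onto an OpaqueNetwork backing reference. An illustrative (non-Nova) version of that mapping, assuming the NSX/OpaqueNetwork case only:

```python
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    return {
        'network_name': vif['network']['bridge'],   # 'br-int' in the log above
        'mac_address': vif['address'],              # e.g. 'fa:16:3e:c7:15:d9'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }
```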
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.288612] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.289831] env[68244]: DEBUG nova.compute.manager [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Received event network-vif-plugged-61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 771.290073] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Acquiring lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.290318] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.290518] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.290710] env[68244]: DEBUG nova.compute.manager [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] No waiting events found dispatching network-vif-plugged-61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 771.290904] env[68244]: WARNING nova.compute.manager [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Received unexpected event network-vif-plugged-61c4e228-79c7-4531-bcb1-6cf1bed2010e for instance with vm_state building and task_state spawning. [ 771.291128] env[68244]: DEBUG nova.compute.manager [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Received event network-changed-61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 771.291334] env[68244]: DEBUG nova.compute.manager [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Refreshing instance network info cache due to event network-changed-61c4e228-79c7-4531-bcb1-6cf1bed2010e. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 771.291545] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Acquiring lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.291706] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Acquired lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.291887] env[68244]: DEBUG nova.network.neutron [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Refreshing network info cache for port 61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.293015] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e3eea1f-c2ce-4596-b2a3-69c841bcb606 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.314139] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.314139] env[68244]: value = "task-2780195" [ 771.314139] env[68244]: _type = "Task" [ 771.314139] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.325169] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780195, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.481469] env[68244]: DEBUG nova.scheduler.client.report [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.498031] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780193, 'name': PowerOnVM_Task, 'duration_secs': 0.449181} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.498313] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.498519] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Took 4.82 seconds to spawn the instance on the hypervisor. [ 771.498696] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.501853] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0acebd-4d1f-42ff-8a46-450ba58f7f71 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.603675] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.640s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.679082] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 771.679306] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.679545] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.679692] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.679870] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.680399] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92b5a9bc-78c9-4aa0-905b-760327a64ba2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.689169] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.689348] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.690031] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1ec6f9e-d7a6-4e92-aee7-31e6fbcdf95f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.694805] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 771.694805] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5228fa72-6584-8628-5da2-43baadbc87c6" [ 771.694805] env[68244]: _type = "Task" [ 771.694805] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.703497] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5228fa72-6584-8628-5da2-43baadbc87c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.824105] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780195, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.865694] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.886109] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.886366] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.886522] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.886761] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.886929] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.887113] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 771.887326] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.887482] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.887644] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.887989] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.888217] env[68244]: DEBUG nova.virt.hardware [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.889096] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf9ebce-ee4d-4fe1-9c11-45c257ca5a9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.898136] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67db3d5e-0049-4ceb-bb57-82aaa352ddf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.993929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.996265] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.481s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.996495] env[68244]: DEBUG nova.objects.instance [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lazy-loading 'resources' on Instance uuid f270caad-1b02-4d5b-a435-37b77c05c4e7 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.017586] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Took 37.21 seconds to build instance. 
[ 772.021842] env[68244]: INFO nova.scheduler.client.report [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleted allocations for instance fd4d5494-042b-457e-a826-dee4d87c0032 [ 772.063088] env[68244]: DEBUG nova.network.neutron [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Updated VIF entry in instance network info cache for port 61c4e228-79c7-4531-bcb1-6cf1bed2010e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.063559] env[68244]: DEBUG nova.network.neutron [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Updating instance_info_cache with network_info: [{"id": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "address": "fa:16:3e:c7:15:d9", "network": {"id": "89b6fb0c-c96b-4a64-8111-47c17dd31fb1", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-185790867-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de3029d574fa4130b5fbbf34d2e39668", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c4e228-79", "ovs_interfaceid": "61c4e228-79c7-4531-bcb1-6cf1bed2010e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.106977] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 772.196553] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Successfully updated port: 1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 772.209620] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5228fa72-6584-8628-5da2-43baadbc87c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010766} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.210568] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b65188a7-1e45-4c02-bf7c-01bf105d5ad3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.217423] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 772.217423] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52453b64-6103-1425-bda5-fbf6ead82754" [ 772.217423] env[68244]: _type = "Task" [ 772.217423] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.225798] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52453b64-6103-1425-bda5-fbf6ead82754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.325424] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780195, 'name': CreateVM_Task, 'duration_secs': 0.55432} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.325424] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.325907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.326083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.327032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 772.327032] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7cd5fe1-3648-4742-b07f-31b92ff4f463 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.331521] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 
tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 772.331521] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52556d87-4356-aa50-94ac-cc91735e32b5" [ 772.331521] env[68244]: _type = "Task" [ 772.331521] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.340264] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52556d87-4356-aa50-94ac-cc91735e32b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.521948] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.518s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.529941] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bdc54ab6-26f1-449d-91e4-3342f56c2154 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "fd4d5494-042b-457e-a826-dee4d87c0032" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.991s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.566507] env[68244]: DEBUG oslo_concurrency.lockutils [req-7cbb42a1-e869-40f5-b86f-a9350efb6d69 req-990273a1-1894-4d4d-8a75-214da4346410 service nova] Releasing lock "refresh_cache-4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.628484] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.703995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.703995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquired lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.704239] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: 
d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.733910] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52453b64-6103-1425-bda5-fbf6ead82754, 'name': SearchDatastore_Task, 'duration_secs': 0.009846} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.733910] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.733910] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 772.734163] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d9919b4-f266-4b64-98aa-089eec154c3d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.743549] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 772.743549] env[68244]: value = "task-2780196" [ 772.743549] env[68244]: _type = "Task" [ 772.743549] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.752204] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.841295] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52556d87-4356-aa50-94ac-cc91735e32b5, 'name': SearchDatastore_Task, 'duration_secs': 0.0125} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.843927] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.844361] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.844445] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.844543] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.844752] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.845235] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea6f1ec1-623c-4a19-a674-b153301b52b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.853141] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.853323] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.856482] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e27a4cfe-e64b-4144-a2d3-6608be7e1199 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.862527] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 772.862527] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527a341e-af89-9ff1-936c-f78d086309fd" [ 772.862527] env[68244]: _type = "Task" [ 772.862527] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.870808] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527a341e-af89-9ff1-936c-f78d086309fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.027678] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.030909] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5895c1ef-1904-48da-b83e-8334f3471d70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.039895] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a21300f-da91-40ac-8e9c-115c3f1c11de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.073575] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b4ee99-4cec-4e05-969b-386077a8ef7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.081415] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5a5283-fa11-4321-9ff6-b48ae1da300a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.096764] env[68244]: DEBUG nova.compute.provider_tree [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.254420] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780196, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.259716] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.316101] env[68244]: DEBUG nova.compute.manager [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Received event network-vif-plugged-1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 773.316383] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Acquiring lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.316762] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.317010] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.317223] env[68244]: DEBUG nova.compute.manager [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] No waiting events found dispatching network-vif-plugged-1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 773.317426] env[68244]: WARNING nova.compute.manager [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Received unexpected event network-vif-plugged-1599a78e-a9aa-40b6-a19e-a36bfd719afc for instance with vm_state building and task_state spawning. [ 773.317623] env[68244]: DEBUG nova.compute.manager [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Received event network-changed-1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 773.317842] env[68244]: DEBUG nova.compute.manager [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Refreshing instance network info cache due to event network-changed-1599a78e-a9aa-40b6-a19e-a36bfd719afc. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 773.318102] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Acquiring lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.375394] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527a341e-af89-9ff1-936c-f78d086309fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009911} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.375394] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de136dcc-7ee4-4803-a453-22b2f0a96a8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.381335] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 773.381335] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fd0c37-1143-3a0c-013c-0f2328bef401" [ 773.381335] env[68244]: _type = "Task" [ 773.381335] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.390354] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fd0c37-1143-3a0c-013c-0f2328bef401, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.463996] env[68244]: DEBUG nova.network.neutron [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Updating instance_info_cache with network_info: [{"id": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "address": "fa:16:3e:85:74:bc", "network": {"id": "1b8ffe42-6475-47eb-8ce0-80be82adaa12", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-168449421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee7371df22443b6865a8934ea27c685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1599a78e-a9", "ovs_interfaceid": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.554939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.599937] env[68244]: DEBUG nova.scheduler.client.report [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.763752] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53331} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.767021] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. [ 773.767021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f902cd-36a3-47af-a889-ee1f7adceaa7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.798700] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.799180] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6e30ba0-aa5e-4c6f-a99b-c18440aff2f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.820107] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 773.820107] env[68244]: value = "task-2780197" [ 773.820107] env[68244]: _type = "Task" [ 773.820107] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.828935] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780197, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.892050] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fd0c37-1143-3a0c-013c-0f2328bef401, 'name': SearchDatastore_Task, 'duration_secs': 0.039666} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.892050] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.892050] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f/4c394e1d-8fef-4b7a-ac9f-550f263c1d7f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.892537] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5862dcf3-0b74-44e0-9522-6e6d4b05cea2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.899046] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 773.899046] env[68244]: value = "task-2780198" [ 773.899046] env[68244]: _type = "Task" [ 773.899046] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.907599] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780198, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.965642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Releasing lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.966008] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Instance network_info: |[{"id": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "address": "fa:16:3e:85:74:bc", "network": {"id": "1b8ffe42-6475-47eb-8ce0-80be82adaa12", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-168449421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee7371df22443b6865a8934ea27c685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1599a78e-a9", "ovs_interfaceid": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 773.966376] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Acquired lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.966496] env[68244]: DEBUG nova.network.neutron [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Refreshing network info cache for port 1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.967775] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:74:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2c68e7-b690-42e2-9491-c3f9357cc66a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1599a78e-a9aa-40b6-a19e-a36bfd719afc', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.977145] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 
tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Creating folder: Project (aee7371df22443b6865a8934ea27c685). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.980453] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38d60576-bcb6-48c1-8048-5a29c0e31896 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.992646] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Created folder: Project (aee7371df22443b6865a8934ea27c685) in parent group-v558876. [ 773.992966] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Creating folder: Instances. Parent ref: group-v558979. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.993958] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18fc8666-70b9-48b3-858f-49c37af12a29 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.005564] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Created folder: Instances in parent group-v558979. [ 774.005882] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 774.006025] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.006233] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1917bf45-98d0-4959-b2e4-73ef9a8c33ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.028543] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.028543] env[68244]: value = "task-2780201" [ 774.028543] env[68244]: _type = "Task" [ 774.028543] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.037108] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780201, 'name': CreateVM_Task} progress is 0%. 
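The two Folder.CreateFolder calls above build the per-tenant folder layout: a "Project (<tenant-id>)" folder under the configured root (group-v558876 here), then an "Instances" folder beneath it to hold the new VM. A sketch of that nesting, assuming a hypothetical create_child_folder(parent_ref, name) helper in place of the real vm_util.create_folder call (which also copes with the folder already existing):

def ensure_instance_folder(create_child_folder, root_folder_ref, project_id):
    # Mirrors the two "Creating folder" / "Created folder" pairs above.
    project_folder = create_child_folder(root_folder_ref,
                                         'Project (%s)' % project_id)
    return create_child_folder(project_folder, 'Instances')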
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.105387] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.107702] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.005s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.109668] env[68244]: INFO nova.compute.claims [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.129224] env[68244]: INFO nova.scheduler.client.report [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Deleted allocations for instance f270caad-1b02-4d5b-a435-37b77c05c4e7 [ 774.308689] env[68244]: DEBUG nova.network.neutron [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Updated VIF entry in instance network info cache for port 1599a78e-a9aa-40b6-a19e-a36bfd719afc. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.309138] env[68244]: DEBUG nova.network.neutron [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Updating instance_info_cache with network_info: [{"id": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "address": "fa:16:3e:85:74:bc", "network": {"id": "1b8ffe42-6475-47eb-8ce0-80be82adaa12", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-168449421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aee7371df22443b6865a8934ea27c685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2c68e7-b690-42e2-9491-c3f9357cc66a", "external-id": "nsx-vlan-transportzone-321", "segmentation_id": 321, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1599a78e-a9", "ovs_interfaceid": "1599a78e-a9aa-40b6-a19e-a36bfd719afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.333499] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.409633] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780198, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.539157] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780201, 'name': CreateVM_Task} progress is 25%. 
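The instance_info_cache update above carries the full VIF description as a JSON-like list. A small, self-contained example of pulling out the fields usually needed when reading such an entry (port id, MAC, fixed IPs, MTU); the literal below is a trimmed copy of the entry logged above:

import json

network_info_json = '''
[{"id": "1599a78e-a9aa-40b6-a19e-a36bfd719afc",
  "address": "fa:16:3e:85:74:bc",
  "network": {"id": "1b8ffe42-6475-47eb-8ce0-80be82adaa12",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.8", "type": "fixed"}]}],
              "meta": {"mtu": 8950}},
  "type": "ovs",
  "devname": "tap1599a78e-a9"}]
'''

for vif in json.loads(network_info_json):
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']]
    print(vif['id'], vif['address'], fixed_ips, vif['network']['meta']['mtu'])
    # -> 1599a78e-... fa:16:3e:85:74:bc ['192.168.128.8'] 8950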
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.637023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c32840fc-13fe-4935-80a9-72edc9f6c929 tempest-DeleteServersAdminTestJSON-112130238 tempest-DeleteServersAdminTestJSON-112130238-project-member] Lock "f270caad-1b02-4d5b-a435-37b77c05c4e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.542s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.814133] env[68244]: DEBUG oslo_concurrency.lockutils [req-6940be6e-fd9d-43dd-a988-ddd344a3a7a6 req-a8f72bd8-262d-4e3f-b079-d90abe0151c3 service nova] Releasing lock "refresh_cache-d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.830389] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780197, 'name': ReconfigVM_Task, 'duration_secs': 0.789912} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.830908] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.831818] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86319c7a-b551-463b-9d4b-8ab6e214b8fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.858753] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99e6a029-86bf-49ff-b635-2e4f1bbfb91e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.876179] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 774.876179] env[68244]: value = "task-2780202" [ 774.876179] env[68244]: _type = "Task" [ 774.876179] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.884111] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.909764] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.646888} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.910275] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f/4c394e1d-8fef-4b7a-ac9f-550f263c1d7f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 774.910548] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.910925] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51ea5fc8-f5d6-47e1-babb-b460d1d66565 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.918127] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 774.918127] env[68244]: value = "task-2780203" [ 774.918127] env[68244]: _type = "Task" [ 774.918127] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.925911] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.042162] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780201, 'name': CreateVM_Task, 'duration_secs': 0.749102} completed successfully. 
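The "Extending root virtual disk to 1048576" line above gives the new capacity in KiB handed to ExtendVirtualDisk_Task: a 1 GiB root disk (root_gb=1, as on the m1.nano flavor shown later in this trace) expressed as 1 * 1024 * 1024 KiB. The arithmetic, for reference:

# 1048576 KiB == 1 GiB: root_gb converted to the KiB units the extend task expects.
root_gb = 1
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576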
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.042539] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 775.045032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.045032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.045032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 775.045501] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45abf602-ae2b-4e8e-a591-b7628cf98572 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.053372] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 775.053372] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f576e5-78d3-2de8-aa04-1c00cae8d84d" [ 775.053372] env[68244]: _type = "Task" [ 775.053372] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.067020] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f576e5-78d3-2de8-aa04-1c00cae8d84d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.389151] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.429719] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178698} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.429999] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.430916] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf83223f-869b-437d-9a9a-49868b208b6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.459960] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f/4c394e1d-8fef-4b7a-ac9f-550f263c1d7f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.460249] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faa79cce-e595-409d-9e12-4e93c4f60e9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.481414] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 775.481414] env[68244]: value = "task-2780204" [ 775.481414] env[68244]: _type = "Task" [ 775.481414] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.489690] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780204, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.566607] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f576e5-78d3-2de8-aa04-1c00cae8d84d, 'name': SearchDatastore_Task, 'duration_secs': 0.012058} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.566607] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.566607] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.566607] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.566761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.566804] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 775.567080] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6274266-3f85-4a34-9fc4-8c7c82c85384 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.579399] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.579609] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Folder [datastore2] devstack-image-cache_base created. 
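The lock/search/mkdir sequence above is the image-cache protocol: serialize on the cached VMDK path, check whether the cache entry already exists on the datastore, create the cache directory if needed, and copy the cached disk into the instance directory only after the lock is released (as in the CopyVirtualDisk lines elsewhere in this trace). A sketch of that flow under stated assumptions; lockutils.lock is the real oslo.concurrency context manager, while datastore_exists, fetch_image_to_cache and copy_virtual_disk are hypothetical stand-ins for the SearchDatastore_Task / CopyVirtualDisk_Task calls:

from oslo_concurrency import lockutils


def provide_root_disk(image_id, instance_uuid,
                      datastore_exists, fetch_image_to_cache, copy_virtual_disk):
    cached = ('[datastore2] devstack-image-cache_base/%s/%s.vmdk'
              % (image_id, image_id))
    target = '[datastore2] %s/%s.vmdk' % (instance_uuid, instance_uuid)
    with lockutils.lock(cached):            # the Acquiring/Releasing lock lines
        if not datastore_exists(cached):    # SearchDatastore_Task
            fetch_image_to_cache(image_id, cached)
    copy_virtual_disk(cached, target)       # CopyVirtualDisk_Task, outside the lock
    return target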
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.580371] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687f08e7-9184-4370-b974-4862e52bc048 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.588087] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 775.588087] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520e7a6a-c3c0-7eff-e1b5-7ebc3d9ab1f1" [ 775.588087] env[68244]: _type = "Task" [ 775.588087] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.597550] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520e7a6a-c3c0-7eff-e1b5-7ebc3d9ab1f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.625974] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c95ad8-a429-4154-8340-0ebc319fffc8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.632929] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f341611d-a6a1-4518-8062-3c8fd4039317 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.664934] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75fc686-7779-4544-afe6-383a5587969d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.672096] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac1eecc-c70a-43a4-b1c1-494453928520 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.685240] env[68244]: DEBUG nova.compute.provider_tree [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.797799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.798429] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.886591] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780202, 'name': ReconfigVM_Task, 'duration_secs': 0.574841} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.886917] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.887280] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48d42ff1-2702-4bc4-b280-a85e1fd65f53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.892953] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 775.892953] env[68244]: value = "task-2780205" [ 775.892953] env[68244]: _type = "Task" [ 775.892953] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.900864] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.991569] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780204, 'name': ReconfigVM_Task, 'duration_secs': 0.381022} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.991758] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f/4c394e1d-8fef-4b7a-ac9f-550f263c1d7f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.992526] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8478678-8b3a-4ba3-99b4-46e798c28a3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.002033] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 776.002033] env[68244]: value = "task-2780206" [ 776.002033] env[68244]: _type = "Task" [ 776.002033] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.011462] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780206, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.098043] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520e7a6a-c3c0-7eff-e1b5-7ebc3d9ab1f1, 'name': SearchDatastore_Task, 'duration_secs': 0.037796} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.098983] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e32529f-6bac-4c49-ab5c-d9a361552a9a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.104339] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 776.104339] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529220ba-39e9-e2f7-f68b-2f13472fdc6f" [ 776.104339] env[68244]: _type = "Task" [ 776.104339] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.114566] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529220ba-39e9-e2f7-f68b-2f13472fdc6f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.189355] env[68244]: DEBUG nova.scheduler.client.report [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 776.402871] env[68244]: DEBUG oslo_vmware.api [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780205, 'name': PowerOnVM_Task, 'duration_secs': 0.428998} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.403019] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.407234] env[68244]: DEBUG nova.compute.manager [None req-c1ef23e1-6806-4d67-9bd0-00c4b61bbe93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.407665] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803dd671-b2a7-4937-89d2-efdcaa93de8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.509012] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780206, 'name': Rename_Task, 'duration_secs': 0.135604} completed successfully. 
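The inventory dict reported above is what placement schedules against: usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. Worked numbers for this provider:

# Effective capacity implied by the inventory above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0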
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.509334] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.509538] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22953826-c612-42f5-a85d-99babf364c4d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.515936] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 776.515936] env[68244]: value = "task-2780207" [ 776.515936] env[68244]: _type = "Task" [ 776.515936] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.527894] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.614018] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529220ba-39e9-e2f7-f68b-2f13472fdc6f, 'name': SearchDatastore_Task, 'duration_secs': 0.014919} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.614296] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.614585] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d1fb6fff-b1b7-4c1b-8995-41628cadf7d5/d1fb6fff-b1b7-4c1b-8995-41628cadf7d5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.614805] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50220cca-50eb-4d05-8e31-a71f3c0c760e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.621397] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 776.621397] env[68244]: value = "task-2780208" [ 776.621397] env[68244]: _type = "Task" [ 776.621397] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.629025] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.694175] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.694716] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 776.697472] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.680s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.698963] env[68244]: INFO nova.compute.claims [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.026931] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780207, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.131465] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780208, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.203596] env[68244]: DEBUG nova.compute.utils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 777.207195] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Allocating IP information in the background. 
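The "Start building networks asynchronously" / "Allocating IP information in the background" pair above reflects the build flow: Neutron port allocation is started in the background while block-device work continues, and the builder only blocks on the network result when it is actually needed. A sketch of that shape using concurrent.futures (the real code path runs on eventlet greenthreads, per the oslo.service eventlet backend seen earlier; the three callables are hypothetical stand-ins):

from concurrent.futures import ThreadPoolExecutor


def build_instance(allocate_for_instance, build_block_devices, spawn):
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_for_instance)   # background allocation
        block_devices = build_block_devices()                 # proceeds meanwhile
        network_info = network_future.result()                # join when needed
    spawn(network_info, block_devices)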
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 777.207363] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 777.247856] env[68244]: DEBUG nova.policy [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e2b78ca269843a0a5541e44727d807b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf55a7bfa5948d1837855650c1c960b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 777.526860] env[68244]: DEBUG oslo_vmware.api [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780207, 'name': PowerOnVM_Task, 'duration_secs': 0.553679} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.527453] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.527808] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Took 8.30 seconds to spawn the instance on the hypervisor. [ 777.528189] env[68244]: DEBUG nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.529652] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201f809e-8912-4f92-98be-33a43f7ac5f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.632708] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573783} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.633256] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d1fb6fff-b1b7-4c1b-8995-41628cadf7d5/d1fb6fff-b1b7-4c1b-8995-41628cadf7d5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.633758] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.634157] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bea4872c-97a1-4eb7-8910-10b72e20212f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.641570] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 777.641570] env[68244]: value = "task-2780209" [ 777.641570] env[68244]: _type = "Task" [ 777.641570] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.650818] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.710416] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.765938] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Successfully created port: 3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.056164] env[68244]: INFO nova.compute.manager [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Took 43.03 seconds to build instance. 
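Figures such as "Took 8.30 seconds to spawn", "Took 43.03 seconds to build instance" and the lock "waited/held" durations are differences of timestamps taken around the operation. A self-contained sketch of that measurement (not Nova's or oslo's own timer utilities):

import time


class ElapsedTimer:
    def __enter__(self):
        self._start = time.monotonic()
        return self

    def __exit__(self, *exc):
        self.elapsed = time.monotonic() - self._start
        return False


with ElapsedTimer() as timer:
    time.sleep(0.05)                       # stands in for the build/spawn work
print('Took %.2f seconds to build instance.' % timer.elapsed)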
[ 778.154010] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071561} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.155354] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.159077] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e3a5aa-c706-46b2-b7ca-14a057b52316 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.183177] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] d1fb6fff-b1b7-4c1b-8995-41628cadf7d5/d1fb6fff-b1b7-4c1b-8995-41628cadf7d5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.186053] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79ac7a47-6bf5-49ec-9e8d-60176b1e96bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.207269] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 778.207269] env[68244]: value = "task-2780210" [ 778.207269] env[68244]: _type = "Task" [ 778.207269] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.215095] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780210, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.344505] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9324b9ff-c391-4cb7-a8b2-0263be03e944 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.352885] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de21ab0d-3851-4409-b45d-0856e04d25a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.385318] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d1ef32-8b0d-4a48-b7bb-f76737cc8652 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.394160] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf88cfc-87d8-4502-92c4-de3a6a0cf129 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.408053] env[68244]: DEBUG nova.compute.provider_tree [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.559799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bef413b4-194f-44e3-bce1-697f8043b915 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.503s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.717891] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.728336] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 778.736651] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.736797] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.760862] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.761117] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.761272] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.761448] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.761587] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.761724] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.761943] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.762109] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 778.762271] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.762425] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.762591] env[68244]: DEBUG nova.virt.hardware [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.763472] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa99da0-8a7d-48c0-8468-0c2ce70ffb74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.771582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478db9b4-a069-4b84-bde6-6920c8755614 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.912487] env[68244]: DEBUG nova.scheduler.client.report [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 779.062545] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 779.089878] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.089964] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.090294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.091415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.091415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.094494] env[68244]: INFO nova.compute.manager [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Terminating instance [ 779.217596] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780210, 'name': ReconfigVM_Task, 'duration_secs': 0.742526} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.217901] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Reconfigured VM instance instance-00000022 to attach disk [datastore2] d1fb6fff-b1b7-4c1b-8995-41628cadf7d5/d1fb6fff-b1b7-4c1b-8995-41628cadf7d5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.218555] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a1d1515-d7ac-422c-ae7d-2f85b8d4fead {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.225495] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 779.225495] env[68244]: value = "task-2780211" [ 779.225495] env[68244]: _type = "Task" [ 779.225495] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.234220] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780211, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.240300] env[68244]: DEBUG nova.compute.utils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 779.323522] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Successfully updated port: 3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 779.427263] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.427263] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 779.428726] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.572s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.430339] env[68244]: INFO nova.compute.claims [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.516873] env[68244]: DEBUG nova.compute.manager [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Received event network-vif-plugged-3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.517183] env[68244]: DEBUG oslo_concurrency.lockutils [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] Acquiring lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.517329] env[68244]: DEBUG oslo_concurrency.lockutils [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.517512] env[68244]: DEBUG oslo_concurrency.lockutils [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.517678] env[68244]: DEBUG nova.compute.manager [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] No waiting events found dispatching network-vif-plugged-3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.517909] env[68244]: WARNING nova.compute.manager [req-76d6dc73-ce7b-4d5f-b86d-0b48cff8ad5a req-e388680d-43ee-4302-8c49-31259f6f6113 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Received unexpected event network-vif-plugged-3d483b86-624f-47ef-844a-5e5c7bf1d4ad for instance with vm_state building and task_state spawning. 
[ 779.588299] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.601321] env[68244]: DEBUG nova.compute.manager [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 779.601620] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 779.602631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d29157a-6422-4563-9875-f69194bc0539 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.610227] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.610885] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8e85080-b37a-4688-82e1-a98fbb02bd0c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.616341] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 779.616341] env[68244]: value = "task-2780212" [ 779.616341] env[68244]: _type = "Task" [ 779.616341] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.624058] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.737300] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780211, 'name': Rename_Task, 'duration_secs': 0.34976} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.737592] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.737865] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d08f958-a9cd-4007-b91f-b1f885b6ac95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.743416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.745264] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 779.745264] env[68244]: value = "task-2780213" [ 779.745264] env[68244]: _type = "Task" [ 779.745264] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.754143] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780213, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.827243] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.827324] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.827520] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.935086] env[68244]: DEBUG nova.compute.utils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 779.938759] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 779.999983] env[68244]: INFO nova.compute.manager [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Rescuing [ 780.000338] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.000436] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.000583] env[68244]: DEBUG nova.network.neutron [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.127826] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780212, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.257485] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780213, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.361070] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.439471] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 780.509867] env[68244]: DEBUG nova.network.neutron [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Updating instance_info_cache with network_info: [{"id": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "address": "fa:16:3e:4d:31:0e", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d483b86-62", "ovs_interfaceid": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.631612] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780212, 'name': PowerOffVM_Task, 'duration_secs': 0.582764} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.631736] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 780.631866] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.632894] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01c5c2d4-caf1-4569-aa35-68656da5b28d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.742133] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.742133] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.742133] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleting the datastore file [datastore2] 6abb889a-2e96-4aba-8e36-c4c8997dd4e2 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.744354] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25bd8f49-5070-4db5-bf7e-4d9c8986b140 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.756439] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 780.756439] env[68244]: value = "task-2780215" [ 780.756439] env[68244]: _type = "Task" [ 780.756439] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.759689] env[68244]: DEBUG oslo_vmware.api [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780213, 'name': PowerOnVM_Task, 'duration_secs': 0.788773} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.765031] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.765031] env[68244]: INFO nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Took 8.90 seconds to spawn the instance on the hypervisor. [ 780.765031] env[68244]: DEBUG nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.765915] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e6c316-a35f-403a-baf0-444e30ec6344 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.774726] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.833201] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.833493] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.833760] env[68244]: INFO nova.compute.manager [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Attaching volume 4939cc0c-6de3-43bd-8978-3d724056d408 to /dev/sdb [ 780.877624] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a33043-8ca8-4dd3-b76f-adc7c1af1a9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.884616] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2edcf4-31fe-4337-a118-938ccef3e05d {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.903728] env[68244]: DEBUG nova.virt.block_device [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updating existing volume attachment record: e380b25d-6a24-4d39-ae3e-f2324bee82ef {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 781.013608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.014119] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance network_info: |[{"id": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "address": "fa:16:3e:4d:31:0e", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d483b86-62", "ovs_interfaceid": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 781.018414] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:31:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d483b86-624f-47ef-844a-5e5c7bf1d4ad', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.040986] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.042194] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.042545] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eff5615-892a-4045-8d23-6c60e7df4618 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.073207] env[68244]: DEBUG nova.network.neutron [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "address": "fa:16:3e:78:36:a6", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb55cce-a3", "ovs_interfaceid": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.082612] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.082612] env[68244]: value = "task-2780216" [ 781.082612] env[68244]: _type = "Task" [ 781.082612] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.090259] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b723beb-6e28-449a-ba76-fc7cc2707359 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.096504] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780216, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.100628] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0db028b-c86d-451e-a6a7-64c150e6a4f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.137634] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9967b6d5-8841-4e6e-801f-ad1a34d43860 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.146869] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a2505a-d7d2-4425-a891-a99a88abe80d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.161374] env[68244]: DEBUG nova.compute.provider_tree [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.269628] env[68244]: DEBUG oslo_vmware.api [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280455} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.269965] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.270209] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.270445] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.270633] env[68244]: INFO nova.compute.manager [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Took 1.67 seconds to destroy the instance on the hypervisor. [ 781.270895] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.271111] env[68244]: DEBUG nova.compute.manager [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 781.271207] env[68244]: DEBUG nova.network.neutron [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.292243] env[68244]: INFO nova.compute.manager [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Took 43.12 seconds to build instance. [ 781.455170] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 781.488904] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 781.489571] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.489571] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 781.489794] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.489991] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 781.490888] 
env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 781.490888] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 781.490888] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 781.491227] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 781.491227] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 781.491389] env[68244]: DEBUG nova.virt.hardware [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 781.492803] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b85028-2e49-4ac9-bf58-0236a2393d02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.503452] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1ad039-59a4-4cdf-b9cd-de238f75b400 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.524360] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.533219] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating folder: Project (6d423708a9ea4720b8f27faa9f67b0c5). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 781.533660] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87039417-a3ac-4912-a6c4-b31a6e7b77f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.545778] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Created folder: Project (6d423708a9ea4720b8f27faa9f67b0c5) in parent group-v558876. [ 781.546082] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating folder: Instances. Parent ref: group-v558986. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 781.546410] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c81f56a-538a-4404-b53d-08f32f37fd36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.556308] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Created folder: Instances in parent group-v558986. [ 781.556598] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.556859] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.557129] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d04d0e5-38ae-4331-9ebb-536ab4118bb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.579959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.588706] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.588706] env[68244]: value = "task-2780222" [ 781.588706] env[68244]: _type = "Task" [ 781.588706] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.597925] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780216, 'name': CreateVM_Task, 'duration_secs': 0.413512} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.600981] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.602500] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.602500] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.604642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 781.607547] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-720e2f35-67d5-4d4f-813e-b800637ac4f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.609866] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780222, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.618363] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 781.618363] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa6f2b-7740-9149-51c3-6cf4a29480f9" [ 781.618363] env[68244]: _type = "Task" [ 781.618363] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.629284] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa6f2b-7740-9149-51c3-6cf4a29480f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.664782] env[68244]: DEBUG nova.scheduler.client.report [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 781.771084] env[68244]: DEBUG nova.compute.manager [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Received event network-changed-3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 781.771084] env[68244]: DEBUG nova.compute.manager [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Refreshing instance network info cache due to event network-changed-3d483b86-624f-47ef-844a-5e5c7bf1d4ad. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 781.771084] env[68244]: DEBUG oslo_concurrency.lockutils [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] Acquiring lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.771084] env[68244]: DEBUG oslo_concurrency.lockutils [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] Acquired lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.771084] env[68244]: DEBUG nova.network.neutron [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Refreshing network info cache for port 3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.794411] env[68244]: DEBUG oslo_concurrency.lockutils [None req-225b23c2-1c78-4ea9-9af6-a57ce71bc622 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.402s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.902865] env[68244]: DEBUG nova.compute.manager [req-cb420fe6-b4b3-4072-b587-b6b6fad3aece req-832bf9ec-600b-4583-9322-318a5dc9781a service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Received event network-vif-deleted-e6164ba3-acf1-412b-b790-6713822c1144 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 781.903086] env[68244]: INFO nova.compute.manager [req-cb420fe6-b4b3-4072-b587-b6b6fad3aece 
req-832bf9ec-600b-4583-9322-318a5dc9781a service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Neutron deleted interface e6164ba3-acf1-412b-b790-6713822c1144; detaching it from the instance and deleting it from the info cache [ 781.903269] env[68244]: DEBUG nova.network.neutron [req-cb420fe6-b4b3-4072-b587-b6b6fad3aece req-832bf9ec-600b-4583-9322-318a5dc9781a service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.099048] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780222, 'name': CreateVM_Task, 'duration_secs': 0.340974} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.099270] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 782.100168] env[68244]: DEBUG oslo_vmware.service [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776d832f-93cf-43d9-85d2-674bdb67dd68 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.105564] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.105779] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.106157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 782.106376] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17decdd7-6e5c-4a86-8d14-3976c8440266 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.110583] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 782.110583] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52856155-bb26-caa2-74cf-0c9b1bac80eb" [ 782.110583] env[68244]: _type = "Task" [ 782.110583] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.119187] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52856155-bb26-caa2-74cf-0c9b1bac80eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.128124] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa6f2b-7740-9149-51c3-6cf4a29480f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009932} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.128943] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.129199] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.129422] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.129567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.129742] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.130238] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26899b5b-eaaa-4c46-9bf3-9cf8ce16956d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.138406] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.138406] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 782.139043] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ac78dbc-4f99-43d9-8d97-f8db5d9c6f15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.145724] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 782.145724] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52673303-dd47-a4ad-b248-7bf5ff709e54" [ 782.145724] env[68244]: _type = "Task" [ 782.145724] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.153983] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52673303-dd47-a4ad-b248-7bf5ff709e54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.163385] env[68244]: DEBUG nova.network.neutron [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.169945] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.741s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.170414] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 782.173498] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.032s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.174962] env[68244]: INFO nova.compute.claims [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.297259] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.406326] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64c136a3-ded9-4731-b9c8-a9537a96b924 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.416179] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d611b67a-86d4-4081-aa41-11bbd3680d0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.452772] env[68244]: DEBUG nova.compute.manager [req-cb420fe6-b4b3-4072-b587-b6b6fad3aece req-832bf9ec-600b-4583-9322-318a5dc9781a service nova] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Detach interface failed, port_id=e6164ba3-acf1-412b-b790-6713822c1144, reason: Instance 6abb889a-2e96-4aba-8e36-c4c8997dd4e2 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 782.566217] env[68244]: DEBUG nova.network.neutron [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Updated VIF entry in instance network info cache for port 3d483b86-624f-47ef-844a-5e5c7bf1d4ad. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 782.566591] env[68244]: DEBUG nova.network.neutron [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Updating instance_info_cache with network_info: [{"id": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "address": "fa:16:3e:4d:31:0e", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d483b86-62", "ovs_interfaceid": "3d483b86-624f-47ef-844a-5e5c7bf1d4ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.623137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.623890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.624173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.624914] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.624914] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.626145] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0bebb4f-bb5d-41a8-824e-9ac7bd8ad87a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.636418] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.636418] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 782.636418] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b836d878-78e3-46f6-adb4-93c8fd8dfc9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.646419] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bebb49c7-9bdf-4b07-bcbb-20db60b407a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.655813] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 782.655813] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7714a-6dbf-ed2c-f9c3-d4a109159135" [ 782.655813] env[68244]: _type = "Task" [ 782.655813] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.659883] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52673303-dd47-a4ad-b248-7bf5ff709e54, 'name': SearchDatastore_Task, 'duration_secs': 0.009482} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.663498] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e70bcb1-732c-4dae-986a-d1b8a0ebc433 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.667648] env[68244]: INFO nova.compute.manager [-] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Took 1.40 seconds to deallocate network for instance. 
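The records around this point repeatedly show oslo_vmware's wait_for_task/_poll_task loop reporting task progress ("progress is 0%", "progress is 89%") until a task "completed successfully" with a duration. Below is a minimal, self-contained sketch of that poll-until-done pattern; `fetch_task_info` is a hypothetical stand-in for the vSphere TaskInfo lookup that oslo.vmware performs internally, not the library's real API.

```python
# Minimal sketch of the poll-until-done pattern visible in the wait_for_task /
# _poll_task records above. `fetch_task_info` is a hypothetical stand-in for
# the vSphere TaskInfo lookup that oslo.vmware performs internally; it is NOT
# the real library API.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0   # percent, as logged ("progress is 0%", "progress is 89%")
    error: str = ""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state, reporting progress."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        print("progress is %d%%" % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in %.0fs" % timeout)


if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([TaskInfo("running", 0),
                   TaskInfo("running", 89),
                   TaskInfo("success", 100)])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))
```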
[ 782.676942] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 782.676942] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating directory with path [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.678293] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 782.678293] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209e47c-93f8-1e4f-b8ba-c74b810eb7b2" [ 782.678293] env[68244]: _type = "Task" [ 782.678293] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.678293] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9ed45dd-4d6a-482b-8f84-fe063033ee18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.680326] env[68244]: DEBUG nova.compute.utils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 782.686685] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 782.686881] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.694265] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209e47c-93f8-1e4f-b8ba-c74b810eb7b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.717935] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Created directory with path [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.717935] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Fetch image to [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 782.717935] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Downloading image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk on the data store datastore1 {{(pid=68244) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 782.717935] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6b5a24-d2c6-4e28-8ba9-5c45b269f075 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.726148] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9fcdd1-60bf-4c47-8c72-1bc7af07e665 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.736421] env[68244]: DEBUG nova.policy [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcb360676a0b4898a283980e7839c68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15f251056bf64f719c7094479b569f0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 782.739411] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2212c977-56ac-4f0b-aa3f-a05a33d13134 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.774595] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb7c50b-b83f-4ff5-8ea1-bbdcadd8f319 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.781362] env[68244]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f857bcc5-58ba-4e50-8dc3-fc76c24ca9b7 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.806533] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Downloading image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to the data store datastore1 {{(pid=68244) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 782.830911] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.867768] env[68244]: DEBUG oslo_vmware.rw_handles [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 783.069787] env[68244]: DEBUG oslo_concurrency.lockutils [req-7dee10ae-955f-40f1-8b62-0e7b665e77ad req-76b8632d-3204-4ed7-99a3-f8d350f19a14 service nova] Releasing lock "refresh_cache-c9f5fbeb-28b6-4b41-9156-5b90bc19977c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.111510] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Successfully created port: 16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.130510] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.130779] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74d8e58f-3db2-4b33-ae7c-2facb6f4a0b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.137786] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 783.137786] env[68244]: value = "task-2780223" [ 783.137786] env[68244]: _type = "Task" [ 783.137786] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.153337] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780223, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.179884] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.191824] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 783.194518] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209e47c-93f8-1e4f-b8ba-c74b810eb7b2, 'name': SearchDatastore_Task, 'duration_secs': 0.020621} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.195536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.195756] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 783.196046] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c6039d9-967c-422c-bc51-c9a34749a006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.207213] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 783.207213] env[68244]: value = "task-2780224" [ 783.207213] env[68244]: _type = "Task" [ 783.207213] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.219805] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780224, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.655529] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780223, 'name': PowerOffVM_Task, 'duration_secs': 0.173743} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.656729] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 783.657880] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a89b5b-6da6-428b-bfc8-172108ba1a45 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.683913] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ff2452-852a-44ce-a2ef-0dc0a3abf13f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.694949] env[68244]: DEBUG oslo_vmware.rw_handles [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 783.695243] env[68244]: DEBUG oslo_vmware.rw_handles [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 783.720242] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780224, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.732069] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.732518] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d752667e-1289-4222-a53f-1f3ee67deca8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.742872] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 783.742872] env[68244]: value = "task-2780226" [ 783.742872] env[68244]: _type = "Task" [ 783.742872] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.752586] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 783.752934] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.753234] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.753392] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.753568] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.754272] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4cab67b-c03c-42b1-8413-45d0f3624a36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.762166] 
env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.762506] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.763535] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55bf430f-17c1-4f1c-973e-a7b520e09cce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.771746] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 783.771746] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e42c0e-e0a4-cef9-a2eb-f093498e5c79" [ 783.771746] env[68244]: _type = "Task" [ 783.771746] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.775566] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Downloaded image file data 9aa0b4d1-af1b-4141-9ca6-95525b722d7e to vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk on the data store datastore1 {{(pid=68244) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 783.777917] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 783.778204] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copying Virtual Disk [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk to [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 783.778448] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-019762f2-3c7a-4806-a9e4-d71f02bcdcd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.788982] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': 
session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e42c0e-e0a4-cef9-a2eb-f093498e5c79, 'name': SearchDatastore_Task, 'duration_secs': 0.008827} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.790996] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 783.790996] env[68244]: value = "task-2780227" [ 783.790996] env[68244]: _type = "Task" [ 783.790996] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.791833] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c81768fd-1f5d-489b-ad3d-97eb55dffd05 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.805912] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 783.805912] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5222eaf0-0346-c8d2-6cd7-51e73b294af3" [ 783.805912] env[68244]: _type = "Task" [ 783.805912] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.808251] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780227, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.813553] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.813756] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.814737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.814737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.814737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.821998] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5222eaf0-0346-c8d2-6cd7-51e73b294af3, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.822729] env[68244]: INFO nova.compute.manager [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Terminating instance [ 783.824706] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.824706] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 783.825720] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbe141cc-6d08-4cfc-900f-d36cd67a3055 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.832406] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 783.832406] env[68244]: value = "task-2780228" [ 783.832406] env[68244]: _type = "Task" [ 783.832406] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.841503] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780228, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.885162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208ef15e-45bc-4124-88d3-5fff4915704a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.893185] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252137d0-ac59-4775-a9c9-bbc0fb6de987 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.925120] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a2c8b7-2668-4e7e-96ec-1c6449a2529c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.933104] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1f250a-0552-45c2-8c54-5708e69af88f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.946563] env[68244]: DEBUG nova.compute.provider_tree [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.201315] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 784.220326] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525843} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.222702] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.222955] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.223547] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-776df6d9-390c-4701-abcd-09f444bd1235 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.230332] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 784.230600] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.231362] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 784.231362] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.231362] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 784.231362] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 784.231567] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 784.231603] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 784.231737] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 784.231893] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 784.232071] env[68244]: DEBUG nova.virt.hardware [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 784.233499] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5a50ef-c6d3-452a-8777-995c5db3bdbd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.243875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0860302-462e-424b-be9a-82e41911ca57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.249729] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 784.249729] env[68244]: value = "task-2780229" [ 784.249729] env[68244]: _type = "Task" [ 784.249729] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.267523] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780229, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.303460] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780227, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.330528] env[68244]: DEBUG nova.compute.manager [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.330528] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.330842] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bcc4f9-94c9-4a10-b084-2219c78f1afe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.342156] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.346357] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4770b0d-cd6a-4852-9c05-d71a6c3f1761 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.348837] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490624} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.349471] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. 
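The records above follow the oslo_vmware task pattern: a vCenter call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, and so on) returns a Task handle such as task-2780229, and wait_for_task/_poll_task then poll that handle, logging intermediate progress until the task completes. The sketch below is only an illustration of that polling loop under stated assumptions; the names used here (wait_for_task_sketch, poll_task_state, fake_poll, TaskFailed) are hypothetical and are not oslo_vmware's actual API.

# Illustrative sketch of a poll-until-done loop like the one the
# wait_for_task/_poll_task entries above record. Not oslo_vmware code.
import time


class TaskFailed(Exception):
    """Raised when a polled task ends in an error state."""


def wait_for_task_sketch(poll_task_state, task_id, interval=0.5, timeout=60.0):
    """Poll task_id until it succeeds, fails, or the timeout expires.

    poll_task_state is assumed to return a (state, progress) tuple,
    e.g. ('running', 40) or ('success', 100), mirroring the progress
    percentages logged by _poll_task above.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_task_state(task_id)
        print(f"Task {task_id}: state={state} progress={progress}%")
        if state == 'success':
            return
        if state == 'error':
            raise TaskFailed(task_id)
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


if __name__ == '__main__':
    # Fake backend that reports success on the third poll.
    calls = {'n': 0}

    def fake_poll(task_id):
        calls['n'] += 1
        if calls['n'] >= 3:
            return ('success', 100)
        return ('running', calls['n'] * 30)

    wait_for_task_sketch(fake_poll, 'task-2780229')

The real driver layers instance-specific work (reconfigure, power on, file moves) on top of this loop, which is why several independent tasks are interleaved in the log with their own progress lines.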
[ 784.351354] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a064ad08-a0a1-4394-ae60-5e2cf5cbd425 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.356487] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 784.356487] env[68244]: value = "task-2780230" [ 784.356487] env[68244]: _type = "Task" [ 784.356487] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.382034] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.382703] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3af49b67-b3cd-47fc-91ba-5a6b0eac6d30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.398816] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.404065] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 784.404065] env[68244]: value = "task-2780231" [ 784.404065] env[68244]: _type = "Task" [ 784.404065] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.412048] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.452335] env[68244]: DEBUG nova.scheduler.client.report [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.762218] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139195} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.762218] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.762218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c3f4c7-f277-4d43-b828-da8814e29993 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.794974] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.796051] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31bbcbbc-5c78-4331-9310-a86d30749796 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.819985] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780227, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.821333] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 784.821333] env[68244]: value = "task-2780232" [ 784.821333] env[68244]: _type = "Task" [ 784.821333] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.829803] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780232, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.868675] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780230, 'name': PowerOffVM_Task, 'duration_secs': 0.273288} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.868940] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.869125] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.869381] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3928559-15d3-4f20-bd28-c209a4e8e71b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.901880] env[68244]: DEBUG nova.compute.manager [req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Received event network-vif-plugged-16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 784.902197] env[68244]: DEBUG oslo_concurrency.lockutils [req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] Acquiring lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.902320] env[68244]: DEBUG oslo_concurrency.lockutils [req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.902464] env[68244]: DEBUG oslo_concurrency.lockutils [req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.902623] env[68244]: DEBUG nova.compute.manager 
[req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] No waiting events found dispatching network-vif-plugged-16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 784.902785] env[68244]: WARNING nova.compute.manager [req-b13ab336-09b9-483a-a328-e08b315eb00c req-2f0a02dc-a2dc-4f87-931e-f651aa8a8c59 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Received unexpected event network-vif-plugged-16438b13-f5f7-472e-af75-2da5ea4e4568 for instance with vm_state building and task_state spawning. [ 784.914793] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.949066] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.949066] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.949066] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Deleting the datastore file [datastore2] d1fb6fff-b1b7-4c1b-8995-41628cadf7d5 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.949066] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-839a51aa-225f-40d5-ad62-32c35d0f62ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.954996] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for the task: (returnval){ [ 784.954996] env[68244]: value = "task-2780234" [ 784.954996] env[68244]: _type = "Task" [ 784.954996] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.958823] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.785s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.959315] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.962210] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.407s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.962427] env[68244]: DEBUG nova.objects.instance [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lazy-loading 'resources' on Instance uuid 59b0dd89-0093-4e50-9428-8db5c7fd429d {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.973425] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.058466] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Successfully updated port: 16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.304858] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780227, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.331314] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780232, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.415033] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780231, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.469153] env[68244]: DEBUG nova.compute.utils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 785.471086] env[68244]: DEBUG oslo_vmware.api [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Task: {'id': task-2780234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172535} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.472294] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 785.472511] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558985', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'name': 'volume-4939cc0c-6de3-43bd-8978-3d724056d408', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c', 'attached_at': '', 'detached_at': '', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'serial': '4939cc0c-6de3-43bd-8978-3d724056d408'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 785.473117] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 785.473401] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.475050] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.475239] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 785.475495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.475577] env[68244]: INFO nova.compute.manager [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 785.475806] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.476599] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fd5c12-130b-4840-b510-ca8c0e62d43e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.479497] env[68244]: DEBUG nova.compute.manager [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 785.479602] env[68244]: DEBUG nova.network.neutron [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.499414] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f3ade6-46e5-4f53-8b7b-72701e1f6d87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.526657] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] volume-4939cc0c-6de3-43bd-8978-3d724056d408/volume-4939cc0c-6de3-43bd-8978-3d724056d408.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.529563] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78734bef-5d4b-459f-a26d-ec51132b3a7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.549241] env[68244]: DEBUG nova.policy [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcb360676a0b4898a283980e7839c68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15f251056bf64f719c7094479b569f0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.550511] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Waiting for the task: (returnval){ [ 785.550511] env[68244]: value = "task-2780235" [ 785.550511] env[68244]: _type = "Task" [ 785.550511] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.559074] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780235, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.562503] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.562668] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.562899] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.778044] env[68244]: DEBUG nova.compute.manager [req-b7c552fa-f49d-43f0-8474-f7d36c8f21c5 req-949dc252-9a3c-4066-8d8f-6ffb981fdb8a service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Received event network-vif-deleted-1599a78e-a9aa-40b6-a19e-a36bfd719afc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 785.778344] env[68244]: INFO nova.compute.manager [req-b7c552fa-f49d-43f0-8474-f7d36c8f21c5 req-949dc252-9a3c-4066-8d8f-6ffb981fdb8a service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Neutron deleted interface 1599a78e-a9aa-40b6-a19e-a36bfd719afc; detaching it from the instance and deleting it from the info cache [ 785.778495] env[68244]: DEBUG nova.network.neutron [req-b7c552fa-f49d-43f0-8474-f7d36c8f21c5 req-949dc252-9a3c-4066-8d8f-6ffb981fdb8a service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.810318] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780227, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.899726} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.810598] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copied Virtual Disk [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk to [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 785.810762] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleting the datastore file [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/tmp-sparse.vmdk {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 785.810996] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15170512-5989-4895-9cb2-03d3a713557a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.817732] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 785.817732] env[68244]: value = "task-2780236" [ 785.817732] env[68244]: _type = "Task" [ 785.817732] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.826641] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.838679] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780232, 'name': ReconfigVM_Task, 'duration_secs': 0.658479} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.838961] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Reconfigured VM instance instance-00000023 to attach disk [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 785.839657] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ec9be7e-bfe8-45c0-b6b6-94232a910640 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.846206] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 785.846206] env[68244]: value = "task-2780237" [ 785.846206] env[68244]: _type = "Task" [ 785.846206] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.854283] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780237, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.917931] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.972180] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.013969] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Successfully created port: 34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.064827] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.115326] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.130826] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4dcf70-a81f-4859-81ab-fe7ee54c5c6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.138639] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770a65b2-2947-4c76-b30a-37b64855eca0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.180262] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a426dec-1b12-49a6-b572-341acb5d3e1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.189580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842af6fa-fceb-4a09-a536-37725b4bb306 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.204776] env[68244]: DEBUG nova.compute.provider_tree [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.254432] env[68244]: DEBUG nova.network.neutron [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.282252] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d697717-097b-4df5-bbcd-80e4006e41ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.293485] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4b1754-4f44-42b5-9138-25b294c45269 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.328472] env[68244]: DEBUG nova.compute.manager [req-b7c552fa-f49d-43f0-8474-f7d36c8f21c5 req-949dc252-9a3c-4066-8d8f-6ffb981fdb8a service nova] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Detach interface failed, port_id=1599a78e-a9aa-40b6-a19e-a36bfd719afc, reason: Instance d1fb6fff-b1b7-4c1b-8995-41628cadf7d5 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 786.343466] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.343466] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.343466] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Moving file from [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e to [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e. {{(pid=68244) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 786.343466] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b5f2b922-0286-4989-838b-08f5e7ba9730 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.351874] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 786.351874] env[68244]: value = "task-2780238" [ 786.351874] env[68244]: _type = "Task" [ 786.351874] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.358505] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780237, 'name': Rename_Task, 'duration_secs': 0.165242} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.362420] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.362420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c63d210-c3ed-4434-ae1e-a7f3bc75644d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.363937] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780238, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.370746] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 786.370746] env[68244]: value = "task-2780239" [ 786.370746] env[68244]: _type = "Task" [ 786.370746] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.379940] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780239, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.394698] env[68244]: DEBUG nova.network.neutron [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Updating instance_info_cache with network_info: [{"id": "16438b13-f5f7-472e-af75-2da5ea4e4568", "address": "fa:16:3e:fb:32:a6", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16438b13-f5", "ovs_interfaceid": "16438b13-f5f7-472e-af75-2da5ea4e4568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.416262] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780231, 'name': ReconfigVM_Task, 'duration_secs': 1.729683} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.417280] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfigured VM instance instance-0000001b to attach disk [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.418301] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2939849-d0aa-402f-91ed-5ba0cd3eb695 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.446369] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87687c85-63af-4f4a-bf52-654b56699efb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.465302] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 786.465302] env[68244]: value = "task-2780240" [ 786.465302] env[68244]: _type = "Task" [ 786.465302] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.474407] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780240, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.562432] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780235, 'name': ReconfigVM_Task, 'duration_secs': 0.690644} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.562773] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfigured VM instance instance-0000000c to attach disk [datastore1] volume-4939cc0c-6de3-43bd-8978-3d724056d408/volume-4939cc0c-6de3-43bd-8978-3d724056d408.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.567684] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c559cbc-54b2-402f-a2cb-d4c04e1a8f9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.584749] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Waiting for the task: (returnval){ [ 786.584749] env[68244]: value = "task-2780241" [ 786.584749] env[68244]: _type = "Task" [ 786.584749] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.593220] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780241, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.727291] env[68244]: ERROR nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] [req-b82f86cc-0b3b-4604-954a-58ab3a7c9c17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b82f86cc-0b3b-4604-954a-58ab3a7c9c17"}]} [ 786.747276] env[68244]: DEBUG nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 786.758303] env[68244]: INFO nova.compute.manager [-] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Took 1.28 seconds to deallocate network for instance. 
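For context on the inventory report and the 409 above: Placement's usual capacity rule is (total - reserved) * allocation_ratio, and the placement.concurrent_update error indicates the provider's generation changed between the read and the write, after which the report client refreshes its cached inventory and retries. The short worked example below just applies that rule to the numbers logged for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3; it is a reading of the logged data, not code from Nova or Placement.

# Illustrative arithmetic only: schedulable capacity implied by the
# inventory logged above, assuming the (total - reserved) * allocation_ratio rule.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: ({inv['total']} - {inv['reserved']}) * "
          f"{inv['allocation_ratio']} = {capacity}")
# Expected output: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

The only field that differs between the conflicting update and the refreshed inventory is DISK_GB max_unit (174 vs 175), which is consistent with another thread having updated the provider in between and bumped its generation.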
[ 786.763638] env[68244]: DEBUG nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 786.763852] env[68244]: DEBUG nova.compute.provider_tree [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.776239] env[68244]: DEBUG nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 786.795181] env[68244]: DEBUG nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 786.862143] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780238, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.03124} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.862447] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] File moved {{(pid=68244) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 786.862572] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Cleaning up location [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 786.863050] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleting the datastore file [datastore1] vmware_temp/e6c58659-5a1f-48ba-84a1-8a99bf76a57e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.863050] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0362eae-f31a-4321-925a-30c406723f57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.872636] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 786.872636] env[68244]: value = "task-2780242" [ 786.872636] env[68244]: _type = "Task" [ 786.872636] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.884261] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780239, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.887547] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780242, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.897723] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.898124] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance network_info: |[{"id": "16438b13-f5f7-472e-af75-2da5ea4e4568", "address": "fa:16:3e:fb:32:a6", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16438b13-f5", "ovs_interfaceid": "16438b13-f5f7-472e-af75-2da5ea4e4568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.898915] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:32:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16438b13-f5f7-472e-af75-2da5ea4e4568', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.906736] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating folder: Project (15f251056bf64f719c7094479b569f0d). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.909629] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33bca2f2-0195-4f14-854a-666d275cb762 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.920075] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created folder: Project (15f251056bf64f719c7094479b569f0d) in parent group-v558876. [ 786.920342] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating folder: Instances. Parent ref: group-v558989. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.920647] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56e522ae-0f31-4c11-ab5e-d0d114694df8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.930118] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created folder: Instances in parent group-v558989. [ 786.930310] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.932879] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.938020] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-459a98c0-195c-4eaf-88d8-a4d51d0e4ea0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.952828] env[68244]: DEBUG nova.compute.manager [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Received event network-changed-16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 786.955353] env[68244]: DEBUG nova.compute.manager [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Refreshing instance network info cache due to event network-changed-16438b13-f5f7-472e-af75-2da5ea4e4568. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 786.955353] env[68244]: DEBUG oslo_concurrency.lockutils [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] Acquiring lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.955353] env[68244]: DEBUG oslo_concurrency.lockutils [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] Acquired lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.955353] env[68244]: DEBUG nova.network.neutron [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Refreshing network info cache for port 16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.962568] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.962568] env[68244]: value = "task-2780245" [ 786.962568] env[68244]: _type = "Task" [ 786.962568] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.978330] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780245, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.979255] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780240, 'name': ReconfigVM_Task, 'duration_secs': 0.317286} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.979507] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.979746] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d1ab616-aec5-441b-9431-8e59d5a5984c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.984637] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.988188] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 786.988188] env[68244]: value = "task-2780246" [ 786.988188] env[68244]: _type = "Task" [ 786.988188] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.999637] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.025766] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.026155] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.026415] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.026698] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.026935] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.027349] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.027531] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.028059] env[68244]: 
DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.028059] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.028395] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.028492] env[68244]: DEBUG nova.virt.hardware [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.030362] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140c0076-2293-43f9-a697-3be91771ee27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.046214] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76e2496-2bd4-4ca9-b514-10525b8aa8df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.097166] env[68244]: DEBUG oslo_vmware.api [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780241, 'name': ReconfigVM_Task, 'duration_secs': 0.138817} completed successfully. 
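The nova.virt.hardware entries above walk a flavor with 1 vCPU and effectively unbounded limits (65536 sockets, cores and threads) down to the single valid topology (sockets=1, cores=1, threads=1). The snippet below is a toy reconstruction of that filtering step, i.e. enumerate triples with sockets * cores * threads == vcpus and each factor within its limit; it is not the actual _get_possible_cpu_topologies code.

```python
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topologies = []
    # Only divisors of vcpus can appear, so the search space stays tiny.
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies


# The flavor in the log (m1.nano, vcpus=1) yields exactly one topology: (1, 1, 1).
print(possible_topologies(1))   # [(1, 1, 1)]
print(possible_topologies(4))   # also includes (1, 2, 2), (2, 2, 1), (4, 1, 1), ...
```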
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.097544] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558985', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'name': 'volume-4939cc0c-6de3-43bd-8978-3d724056d408', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c', 'attached_at': '', 'detached_at': '', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'serial': '4939cc0c-6de3-43bd-8978-3d724056d408'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 787.267583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.388894] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780239, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.390228] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030801} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.390457] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.391179] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d11537-0a5c-40ac-9ca6-db7a98c726dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.397348] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 787.397348] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5210c275-6450-827b-81ef-3d2606a8c745" [ 787.397348] env[68244]: _type = "Task" [ 787.397348] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.406138] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5210c275-6450-827b-81ef-3d2606a8c745, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.444832] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e90f8a0-75e7-4639-a16a-f05240fb749c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.453446] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd9e555-edb2-4cef-b3bf-b2247e3686d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.493338] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace6748e-afbb-4c10-bd54-92177b4447fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.505023] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780245, 'name': CreateVM_Task, 'duration_secs': 0.383572} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.506061] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.507024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.507208] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.507991] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 787.513349] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a772c7-cc63-412c-bc88-2271ed307257 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.514371] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780246, 'name': 
PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.515736] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cef2ebf-56c0-4e68-99c4-8f4b69bc193c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.523151] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 787.523151] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529d1676-df9e-0261-0bb6-98b28acd81b7" [ 787.523151] env[68244]: _type = "Task" [ 787.523151] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.531106] env[68244]: DEBUG nova.compute.provider_tree [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.541753] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529d1676-df9e-0261-0bb6-98b28acd81b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009863} completed successfully. 
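The ProviderTree update above carries the raw inventory fields (total, reserved, allocation_ratio, max_unit) for VCPU, MEMORY_MB and DISK_GB. Placement derives the capacity it will allocate against roughly as (total - reserved) * allocation_ratio, with max_unit bounding any single allocation. The snippet below applies that to the logged numbers as a worked example; it is not Placement's code.

```python
inventory = {
    # Values copied from the provider-tree update logged above.
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 174},
}

for rc, inv in inventory.items():
    # Capacity Placement allocates against: (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:.0f}, largest single allocation={inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 for the values above.
```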
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.542822] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.543097] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.543288] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.761102] env[68244]: DEBUG nova.network.neutron [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Updated VIF entry in instance network info cache for port 16438b13-f5f7-472e-af75-2da5ea4e4568. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.761997] env[68244]: DEBUG nova.network.neutron [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 req-1ff49556-a724-44df-a377-0f020597d853 service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Updating instance_info_cache with network_info: [{"id": "16438b13-f5f7-472e-af75-2da5ea4e4568", "address": "fa:16:3e:fb:32:a6", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16438b13-f5", "ovs_interfaceid": "16438b13-f5f7-472e-af75-2da5ea4e4568", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.850949] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Successfully updated port: 34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.888121] env[68244]: DEBUG oslo_vmware.api [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780239, 'name': PowerOnVM_Task, 'duration_secs': 1.422896} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.888427] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 787.888662] env[68244]: INFO nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Took 9.16 seconds to spawn the instance on the hypervisor. [ 787.888891] env[68244]: DEBUG nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.889953] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b803b9-06af-4d1b-b92d-92ee6e67abee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.912442] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5210c275-6450-827b-81ef-3d2606a8c745, 'name': SearchDatastore_Task, 'duration_secs': 0.009974} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.912442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.912442] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.912442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.912659] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.912762] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bf36462-3e71-4118-b95d-b3fd2eec4c09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.915446] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d9784b6-e8e4-4a46-a1e8-c9892dd279f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.924922] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 787.924922] env[68244]: value = "task-2780247" [ 787.924922] env[68244]: _type = "Task" [ 787.924922] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.926133] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.926315] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore1] devstack-image-cache_base created. 
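The CopyVirtualDisk_Task above copies the cached base image from devstack-image-cache_base/<image-id>/<image-id>.vmdk into <instance-uuid>/<instance-uuid>.vmdk on the same datastore, which becomes the instance's root disk. The helper below just rebuilds those two "[datastore] path" strings from an image id and instance uuid; the cache directory name is deployment configuration (the value here simply matches the log), and this is a naming-convention illustration rather than the ds_util code.

```python
def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
    """Datastore path of the cached base-image VMDK."""
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"


def instance_root_disk_path(datastore, instance_uuid):
    """Datastore path the cached image is copied to for one instance."""
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


image_id = "9aa0b4d1-af1b-4141-9ca6-95525b722d7e"
instance_uuid = "f113bb6c-f05a-4253-98af-ca827fcbb723"
print(cached_image_path("datastore1", image_id))
print(instance_root_disk_path("datastore1", instance_uuid))
```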
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.930651] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0a774c1-1ea8-4eec-a226-f5ba7b9b8a3e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.939112] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.941306] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 787.941306] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fe60-5443-5a19-8293-728d71c79eea" [ 787.941306] env[68244]: _type = "Task" [ 787.941306] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.951773] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fe60-5443-5a19-8293-728d71c79eea, 'name': SearchDatastore_Task, 'duration_secs': 0.009139} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.954439] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a55fdf04-7df7-40b4-868e-a709731d1841 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.959276] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 787.959276] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2326a-8b2e-87cf-1765-afe74d0a21db" [ 787.959276] env[68244]: _type = "Task" [ 787.959276] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.967970] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2326a-8b2e-87cf-1765-afe74d0a21db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.002141] env[68244]: DEBUG oslo_vmware.api [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780246, 'name': PowerOnVM_Task, 'duration_secs': 0.596998} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.002600] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.006021] env[68244]: DEBUG nova.compute.manager [None req-4eb1dd74-67fa-4d89-89e8-1ccbe6f90e93 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.007043] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88fc5bf-601a-408a-ba5c-184bb395e3e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.082140] env[68244]: DEBUG nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 788.082727] env[68244]: DEBUG nova.compute.provider_tree [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 64 to 65 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 788.083131] env[68244]: DEBUG nova.compute.provider_tree [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.143045] env[68244]: DEBUG nova.objects.instance [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lazy-loading 'flavor' on Instance uuid 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.266396] env[68244]: DEBUG oslo_concurrency.lockutils [req-9f08e2f4-5fc8-461e-a907-6c10a0639a84 
req-1ff49556-a724-44df-a377-0f020597d853 service nova] Releasing lock "refresh_cache-cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.357516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.357516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.357516] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.416676] env[68244]: INFO nova.compute.manager [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Took 43.33 seconds to build instance. [ 788.437436] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505817} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.438424] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.438629] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.438924] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-835ed444-c612-4046-ada7-c7fe13d3aa41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.449657] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 788.449657] env[68244]: value = "task-2780248" [ 788.449657] env[68244]: _type = "Task" [ 788.449657] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.461434] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.473042] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2326a-8b2e-87cf-1765-afe74d0a21db, 'name': SearchDatastore_Task, 'duration_secs': 0.008554} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.473494] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.473913] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.476024] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e2b771c-d053-4b6e-89dc-94955120848b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.482026] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 788.482026] env[68244]: value = "task-2780249" [ 788.482026] env[68244]: _type = "Task" [ 788.482026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.491868] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.591109] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.629s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.593424] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.471s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.595740] env[68244]: INFO nova.compute.claims [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.622095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.625411] env[68244]: INFO nova.scheduler.client.report [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Deleted allocations for instance 59b0dd89-0093-4e50-9428-8db5c7fd429d [ 788.649697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12d404e3-0847-4447-a8b0-b106ce5c00aa tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.816s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.650719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.029s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.896357] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Instance cache missing network info. 
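The lockutils entries throughout this section report how long each named lock was waited on and held, e.g. 29.471s waited and 3.629s held for "compute_resources" just above, and per-instance "refresh_cache-<uuid>" locks elsewhere. A minimal use of the same oslo.concurrency primitive is sketched below, assuming oslo.concurrency is installed; the lock names mirror the log, the bodies are placeholders, and the external/fair options are left at their defaults.

```python
import time

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def update_usage():
    """Runs with the in-process 'compute_resources' lock held."""
    time.sleep(0.1)  # placeholder for resource-tracker work


def refresh_cache(instance_uuid):
    # The per-instance cache refreshes in the log use a lock named after the instance.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        time.sleep(0.1)  # placeholder for the Neutron round-trip


if __name__ == "__main__":
    update_usage()
    refresh_cache("cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b")
```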
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.921141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3ae7d57-ea1a-48e7-ae80-770364ad0fb8 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.882s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.959864] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072154} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.960821] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.960963] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1961123c-a4c9-4333-9a8f-85a1f6165733 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.980848] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.981142] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99aa5440-bf11-4129-be7a-fe0793ab90c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.007175] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.008978] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 789.008978] env[68244]: value = "task-2780250" [ 789.008978] env[68244]: _type = "Task" [ 789.008978] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.016254] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780250, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.136394] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82a77a3f-1206-4175-83fc-90562860f873 tempest-ImagesOneServerTestJSON-1700659778 tempest-ImagesOneServerTestJSON-1700659778-project-member] Lock "59b0dd89-0093-4e50-9428-8db5c7fd429d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.148s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.153607] env[68244]: INFO nova.compute.manager [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Detaching volume 4939cc0c-6de3-43bd-8978-3d724056d408 [ 789.156510] env[68244]: DEBUG nova.network.neutron [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Updating instance_info_cache with network_info: [{"id": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "address": "fa:16:3e:f4:50:21", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e1c670-82", "ovs_interfaceid": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.196550] env[68244]: INFO nova.virt.block_device [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Attempting to driver detach volume 4939cc0c-6de3-43bd-8978-3d724056d408 from mountpoint /dev/sdb [ 789.196692] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 789.196918] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558985', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'name': 'volume-4939cc0c-6de3-43bd-8978-3d724056d408', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c', 'attached_at': '', 'detached_at': '', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'serial': '4939cc0c-6de3-43bd-8978-3d724056d408'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 789.197805] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb402d5-cf4c-4533-98c6-28ed389bff47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.222998] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ae932d-c836-448e-8bde-e380b4e816d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.232050] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4add9b5a-d762-46c1-87df-30b7beb7bbb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.252857] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3638c3ff-9dc7-4163-8fe6-ef51ed9ff1a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.268190] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] The volume has not been displaced from its original location: [datastore1] volume-4939cc0c-6de3-43bd-8978-3d724056d408/volume-4939cc0c-6de3-43bd-8978-3d724056d408.vmdk. No consolidation needed. 
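The _detach_volume_vmdk entry above carries the vmdk connection_info: the vCenter object backing the Cinder volume ('vm-558985'), the volume id, and the volume-named VMDK that the consolidation check then locates at volume-<id>/volume-<id>.vmdk. The sketch below pulls those fields out of a dict shaped like the logged one; the helper name is hypothetical and the path convention is taken directly from the "not displaced" message above.

```python
connection_info = {
    # Trimmed copy of the connection_info logged above.
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-558985",
        "volume_id": "4939cc0c-6de3-43bd-8978-3d724056d408",
        "name": "volume-4939cc0c-6de3-43bd-8978-3d724056d408",
        "access_mode": "rw",
        "encrypted": False,
    },
}


def describe_vmdk_volume(connection_info, datastore="datastore1"):
    """Summarise the fields the detach path cares about."""
    data = connection_info["data"]
    vmdk_path = f"[{datastore}] {data['name']}/{data['name']}.vmdk"
    return {
        "volume_id": data["volume_id"],
        "backing_ref": data["volume"],   # vCenter object backing the Cinder volume
        "vmdk_path": vmdk_path,          # matches the 'not displaced' path in the log
        "read_only": data["access_mode"] != "rw",
    }


print(describe_vmdk_volume(connection_info))
```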
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 789.273550] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 789.273855] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c819f221-1924-4ac6-9e5f-96dac261636f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.292235] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Waiting for the task: (returnval){ [ 789.292235] env[68244]: value = "task-2780251" [ 789.292235] env[68244]: _type = "Task" [ 789.292235] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.299214] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780251, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.406267] env[68244]: DEBUG nova.compute.manager [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Received event network-vif-plugged-34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 789.406450] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Acquiring lock "f579141b-1fac-4541-99c3-07644a0a358c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.407068] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Lock "f579141b-1fac-4541-99c3-07644a0a358c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.407068] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Lock "f579141b-1fac-4541-99c3-07644a0a358c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.407068] env[68244]: DEBUG nova.compute.manager [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] No waiting events found dispatching network-vif-plugged-34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 789.408047] env[68244]: WARNING nova.compute.manager [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Received unexpected event network-vif-plugged-34e1c670-8287-43d2-9eac-d13b3e5a5c47 for instance with vm_state building and task_state spawning. [ 789.408218] env[68244]: DEBUG nova.compute.manager [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Received event network-changed-34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 789.409057] env[68244]: DEBUG nova.compute.manager [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Refreshing instance network info cache due to event network-changed-34e1c670-8287-43d2-9eac-d13b3e5a5c47. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 789.409057] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Acquiring lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.424558] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 789.510435] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.519837] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780250, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.664024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.664388] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Instance network_info: |[{"id": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "address": "fa:16:3e:f4:50:21", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e1c670-82", "ovs_interfaceid": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 789.667472] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Acquired lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.667676] env[68244]: DEBUG nova.network.neutron [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Refreshing network info cache for port 34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.669106] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:50:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34e1c670-8287-43d2-9eac-d13b3e5a5c47', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.677416] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 
tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.678828] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.679299] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0108a7ec-d6d4-4c05-8b13-b874f36a2ac1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.705457] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.705457] env[68244]: value = "task-2780252" [ 789.705457] env[68244]: _type = "Task" [ 789.705457] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.714600] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780252, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.802382] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780251, 'name': ReconfigVM_Task, 'duration_secs': 0.364985} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.802713] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 789.807753] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e175b2b6-2644-4c4d-9dbf-07ec7d096341 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.825985] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Waiting for the task: (returnval){ [ 789.825985] env[68244]: value = "task-2780253" [ 789.825985] env[68244]: _type = "Task" [ 789.825985] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.835133] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780253, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.954966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.012470] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.024076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.024076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.030038] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780250, 'name': ReconfigVM_Task, 'duration_secs': 0.65956} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.030038] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Reconfigured VM instance instance-00000024 to attach disk [datastore1] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.030038] env[68244]: INFO nova.compute.manager [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Unrescuing [ 790.030038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.030038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.030038] env[68244]: DEBUG nova.network.neutron [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.031255] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a8c25d3-c49c-4fc1-949c-fb3710d32a80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.041090] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 790.041090] env[68244]: value = "task-2780254" [ 790.041090] env[68244]: _type = "Task" [ 790.041090] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.053173] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780254, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.217765] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780252, 'name': CreateVM_Task, 'duration_secs': 0.459679} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.217999] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.218903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.219129] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.219485] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 790.222125] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc296560-a8a6-4f30-adb0-fc757d1a03a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.227450] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 790.227450] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52513d81-a1b6-0875-d1e7-2281ea3e53ed" [ 790.227450] env[68244]: _type = "Task" [ 790.227450] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.229635] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9fde88-c603-4a1a-a2eb-b6f1981a8bea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.248204] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3422b90f-6261-4552-9ee0-cc0aca09fc70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.252532] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52513d81-a1b6-0875-d1e7-2281ea3e53ed, 'name': SearchDatastore_Task, 'duration_secs': 0.008967} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.253789] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.253789] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.254090] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.254350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.254576] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.255506] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-046708f9-6f92-4720-9a33-b7fb6a9bacfb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.299942] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26e526c-be74-4569-80a1-a46158b669f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.307527] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.307742] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.308556] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3944efdd-f311-414d-8ff9-a8e0a2dc21bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.315263] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61fe6a6-38ba-42d1-9d39-dde194f3a872 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.322472] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 790.322472] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f80761-d5ea-daf7-e495-c72be233b33b" [ 790.322472] env[68244]: _type = "Task" [ 790.322472] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.333381] env[68244]: DEBUG nova.compute.provider_tree [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.350096] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f80761-d5ea-daf7-e495-c72be233b33b, 'name': SearchDatastore_Task, 'duration_secs': 0.008952} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.350332] env[68244]: DEBUG oslo_vmware.api [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Task: {'id': task-2780253, 'name': ReconfigVM_Task, 'duration_secs': 0.177864} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.351772] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558985', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'name': 'volume-4939cc0c-6de3-43bd-8978-3d724056d408', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c', 'attached_at': '', 'detached_at': '', 'volume_id': '4939cc0c-6de3-43bd-8978-3d724056d408', 'serial': '4939cc0c-6de3-43bd-8978-3d724056d408'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 790.354192] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269b7e50-b83e-4681-abe8-84e2a5357b27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.359710] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 790.359710] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52455705-9f41-a357-caa3-d1a3548c1017" [ 790.359710] env[68244]: _type = "Task" [ 790.359710] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.368415] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52455705-9f41-a357-caa3-d1a3548c1017, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.510547] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.525728] env[68244]: DEBUG nova.network.neutron [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Updated VIF entry in instance network info cache for port 34e1c670-8287-43d2-9eac-d13b3e5a5c47. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.525728] env[68244]: DEBUG nova.network.neutron [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Updating instance_info_cache with network_info: [{"id": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "address": "fa:16:3e:f4:50:21", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e1c670-82", "ovs_interfaceid": "34e1c670-8287-43d2-9eac-d13b3e5a5c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.557312] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780254, 'name': Rename_Task, 'duration_secs': 0.236032} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.557487] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.557920] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27d6f411-74cf-4b9c-b09f-4ca59937a061 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.564631] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 790.564631] env[68244]: value = "task-2780255" [ 790.564631] env[68244]: _type = "Task" [ 790.564631] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.573763] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780255, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.845263] env[68244]: DEBUG nova.scheduler.client.report [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.874882] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52455705-9f41-a357-caa3-d1a3548c1017, 'name': SearchDatastore_Task, 'duration_secs': 0.009211} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.875351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.875679] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] f579141b-1fac-4541-99c3-07644a0a358c/f579141b-1fac-4541-99c3-07644a0a358c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.876202] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2535e396-fb56-41a7-9603-a5a4440bd578 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.881045] env[68244]: DEBUG nova.network.neutron [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [{"id": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "address": "fa:16:3e:78:36:a6", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fb55cce-a3", "ovs_interfaceid": "4fb55cce-a3f8-40f2-92e8-9f7166bcbf26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.886086] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 790.886086] env[68244]: value = "task-2780256" [ 790.886086] env[68244]: _type = "Task" [ 790.886086] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.895413] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780256, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.930439] env[68244]: DEBUG nova.objects.instance [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lazy-loading 'flavor' on Instance uuid 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.012343] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780249, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.411462} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.012832] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.013459] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.013722] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c198dced-a500-4674-ba14-411c2c897bcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.019913] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 791.019913] env[68244]: value = "task-2780257" [ 791.019913] env[68244]: _type = "Task" [ 791.019913] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.029503] env[68244]: DEBUG oslo_concurrency.lockutils [req-d2a9f67d-1dd9-47d0-aa2a-cb0c0dd8adf3 req-ad24060b-60f3-461e-b713-a93abd2b7a3b service nova] Releasing lock "refresh_cache-f579141b-1fac-4541-99c3-07644a0a358c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.030110] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.059182] env[68244]: INFO nova.compute.manager [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Rebuilding instance [ 791.078779] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780255, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.120694] env[68244]: DEBUG nova.compute.manager [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.121863] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0d5c8c-86f7-4f8a-b6bc-60ba2eb47726 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.349940] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.350472] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 791.353137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.929s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.353362] env[68244]: DEBUG nova.objects.instance [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lazy-loading 'resources' on Instance uuid bbc08614-926e-4209-abec-4808f223943a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.385087] env[68244]: DEBUG oslo_concurrency.lockutils [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-e2099d6d-5ab7-4a3e-8034-a3b4fc422749" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.385750] env[68244]: DEBUG nova.objects.instance [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'flavor' on Instance uuid e2099d6d-5ab7-4a3e-8034-a3b4fc422749 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.396881] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476062} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.396881] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] f579141b-1fac-4541-99c3-07644a0a358c/f579141b-1fac-4541-99c3-07644a0a358c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.397294] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.397330] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7a77a37-d308-4167-a5dd-76c14b15156f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.405032] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 791.405032] env[68244]: value = "task-2780258" [ 791.405032] env[68244]: _type = "Task" [ 791.405032] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.413594] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780258, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.530568] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074662} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.530834] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 791.531629] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985c90dd-cfdc-4847-8d5a-5afabd7853aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.555253] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.555556] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be9b7c5c-3a9d-443e-9b99-6f798cae1e46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.582224] env[68244]: DEBUG oslo_vmware.api [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780255, 'name': PowerOnVM_Task, 'duration_secs': 0.720565} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.583571] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.583768] env[68244]: INFO nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Took 10.13 seconds to spawn the instance on the hypervisor. [ 791.583943] env[68244]: DEBUG nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.584343] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 791.584343] env[68244]: value = "task-2780259" [ 791.584343] env[68244]: _type = "Task" [ 791.584343] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.585459] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfef6cb-b986-467d-9143-850c961e078a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.601872] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780259, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.856788] env[68244]: DEBUG nova.compute.utils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 791.861547] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 791.861547] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.894708] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28109ae3-262b-45f7-9b51-d6106643bd01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.920740] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.924308] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce2a7a3a-c73d-4df7-a79a-10044c8eb3bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.933104] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06023} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.934022] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 791.934209] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298356ea-0a5d-483c-9835-f67f50a61524 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.940859] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "3776b39a-d10b-4068-8b4b-5dc25798e088" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.941187] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.941412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.941608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.941783] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.943452] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1998cf64-1cdc-47e3-b547-c9f60dfcd71c tempest-VolumesAssistedSnapshotsTest-987514587 tempest-VolumesAssistedSnapshotsTest-987514587-project-admin] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.293s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.944620] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 791.944620] env[68244]: value = "task-2780260" [ 791.944620] env[68244]: _type = "Task" [ 791.944620] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.945809] env[68244]: DEBUG nova.policy [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94887627a23e4ff09e0c530ef5b1afb5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4138f768f30b41d1983fc67959dec2e1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.948273] env[68244]: INFO nova.compute.manager [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Terminating instance [ 791.972852] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] f579141b-1fac-4541-99c3-07644a0a358c/f579141b-1fac-4541-99c3-07644a0a358c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.977032] env[68244]: DEBUG nova.compute.manager [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 791.977277] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 791.977959] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7887a944-ea64-4e97-b7fb-9291908c7c2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.993631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662e4eb6-5b38-4a6b-9973-d6467b0c6701 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.000331] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780260, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.006103] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 792.008307] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61a1a290-1131-4489-9db2-3322040a49e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.010330] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 792.010330] env[68244]: value = "task-2780261" [ 792.010330] env[68244]: _type = "Task" [ 792.010330] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.022248] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780261, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.024877] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 792.024877] env[68244]: value = "task-2780262" [ 792.024877] env[68244]: _type = "Task" [ 792.024877] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.032596] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.108334] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780259, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.114989] env[68244]: INFO nova.compute.manager [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Took 43.12 seconds to build instance. [ 792.138666] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 792.138666] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07adda14-dee8-4ee6-bab9-26452f7d171f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.151188] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 792.151188] env[68244]: value = "task-2780263" [ 792.151188] env[68244]: _type = "Task" [ 792.151188] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.167133] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.361796] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 792.461393] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780260, 'name': PowerOffVM_Task, 'duration_secs': 0.386069} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.461393] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.466969] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfiguring VM instance instance-0000001b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 792.470682] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95d18df7-2798-45f1-bba6-8961e77b4018 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.494591] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 792.494591] env[68244]: value = "task-2780265" [ 792.494591] env[68244]: _type = "Task" [ 792.494591] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.505106] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.522371] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780261, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.551728] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780262, 'name': PowerOffVM_Task, 'duration_secs': 0.338941} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.554773] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.555017] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 792.556326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9279ec36-a48c-4333-a9d1-742adc1610db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.606953] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780259, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.618933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7300c0a-9e95-408c-b8b2-8bfa7758e6bb tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.316s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.630272] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 792.630637] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 792.630790] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleting the datastore file [datastore2] 3776b39a-d10b-4068-8b4b-5dc25798e088 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.631602] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Successfully created port: ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
792.633723] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a25531b-f5e8-4b23-b62a-bacb88de5a5d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.638250] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61d2dee-2705-4e1f-93a2-9515520476cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.643328] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 792.643328] env[68244]: value = "task-2780267" [ 792.643328] env[68244]: _type = "Task" [ 792.643328] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.651252] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b4de55-59c2-4fda-9dfc-fc97593e25dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.657712] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.666265] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780263, 'name': PowerOffVM_Task, 'duration_secs': 0.244205} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.693105] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.693416] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 792.695739] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7418fb7-2cf1-4f76-9861-db5e5732070b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.699047] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2cf3d9-d51a-4130-8bc2-09717653b4ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.710107] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b56bf2-d829-40bb-a2bc-4d697351eb4d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.715127] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 792.715389] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-890e65eb-7980-4623-8f12-6f47e01f501f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.730178] env[68244]: DEBUG nova.compute.provider_tree [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.881516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.881879] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.006266] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780265, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.020255] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780261, 'name': ReconfigVM_Task, 'duration_secs': 0.597229} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.020538] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Reconfigured VM instance instance-00000026 to attach disk [datastore1] f579141b-1fac-4541-99c3-07644a0a358c/f579141b-1fac-4541-99c3-07644a0a358c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.021208] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-843b0ef5-1edb-4aef-89ba-1df92d9ad75f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.031242] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 793.031242] env[68244]: value = "task-2780269" [ 793.031242] env[68244]: _type = "Task" [ 793.031242] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.042894] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780269, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.100938] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780259, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.122615] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.131468] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 793.131468] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 793.131468] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 793.131468] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f904f205-fcd9-4552-bc50-7d4760862dc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.135631] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 793.135631] env[68244]: value = "task-2780270" [ 793.135631] env[68244]: _type = "Task" [ 793.135631] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.146281] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.159949] env[68244]: DEBUG oslo_vmware.api [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387767} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.160091] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 793.160222] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 793.160798] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 793.160798] env[68244]: INFO nova.compute.manager [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Took 1.18 seconds to destroy the instance on the hypervisor. [ 793.160994] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 793.161584] env[68244]: DEBUG nova.compute.manager [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 793.161668] env[68244]: DEBUG nova.network.neutron [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 793.233928] env[68244]: DEBUG nova.scheduler.client.report [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 793.372439] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 793.402791] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 793.403057] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.403249] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 793.403409] env[68244]: DEBUG nova.virt.hardware [None 
req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.403553] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 793.403698] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 793.403950] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 793.404363] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 793.404620] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 793.404756] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 793.404929] env[68244]: DEBUG nova.virt.hardware [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 793.406428] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f77292-e38c-4c4e-89c5-aaa253e69157 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.415541] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3c00e0-29d1-49b7-95bf-532ab19d3efe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.507370] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': 
task-2780265, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.551536] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780269, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.603574] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780259, 'name': ReconfigVM_Task, 'duration_secs': 1.540117} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.604348] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfigured VM instance instance-00000025 to attach disk [datastore1] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.604893] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa8ef4ec-baff-4d61-af4d-b7c3fb34a30e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.615322] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 793.615322] env[68244]: value = "task-2780271" [ 793.615322] env[68244]: _type = "Task" [ 793.615322] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.625096] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780271, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.646495] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15318} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.647250] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 793.647771] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 793.647843] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 793.654431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.741189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.388s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.744483] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 31.248s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.767327] env[68244]: INFO nova.scheduler.client.report [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Deleted allocations for instance bbc08614-926e-4209-abec-4808f223943a [ 794.005816] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780265, 'name': ReconfigVM_Task, 'duration_secs': 1.442454} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.006274] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Reconfigured VM instance instance-0000001b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 794.006506] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.006769] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45985016-740b-4631-b763-8ee213deed69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.015594] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 794.015594] env[68244]: value = "task-2780272" [ 794.015594] env[68244]: _type = "Task" [ 794.015594] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.027866] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780272, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.043608] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780269, 'name': Rename_Task, 'duration_secs': 1.005564} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.044010] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.044235] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc3da977-c764-4f9e-8452-e6362ec5d8ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.052438] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 794.052438] env[68244]: value = "task-2780273" [ 794.052438] env[68244]: _type = "Task" [ 794.052438] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.070468] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.125422] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780271, 'name': Rename_Task, 'duration_secs': 0.158854} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.125764] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 794.126039] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e02142c-d904-43ce-b524-2446025be828 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.134525] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 794.134525] env[68244]: value = "task-2780274" [ 794.134525] env[68244]: _type = "Task" [ 794.134525] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.151392] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780274, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.218350] env[68244]: DEBUG nova.compute.manager [req-ee4a4327-a3f5-4ab3-83f7-25577eb47d6a req-c5e35136-9603-4d02-b4c7-0a4da6fb9449 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Received event network-vif-deleted-57f7dda3-98ee-46c7-871d-37b0add34372 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 794.218405] env[68244]: INFO nova.compute.manager [req-ee4a4327-a3f5-4ab3-83f7-25577eb47d6a req-c5e35136-9603-4d02-b4c7-0a4da6fb9449 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Neutron deleted interface 57f7dda3-98ee-46c7-871d-37b0add34372; detaching it from the instance and deleting it from the info cache [ 794.218580] env[68244]: DEBUG nova.network.neutron [req-ee4a4327-a3f5-4ab3-83f7-25577eb47d6a req-c5e35136-9603-4d02-b4c7-0a4da6fb9449 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.224365] env[68244]: INFO nova.compute.manager [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Rebuilding instance [ 794.279902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3943ea15-36cd-4324-a333-8cbe1e954b68 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036 tempest-FloatingIPsAssociationNegativeTestJSON-1739726036-project-member] Lock "bbc08614-926e-4209-abec-4808f223943a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.485s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.288239] env[68244]: DEBUG nova.compute.manager [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.289447] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be07c81f-4a67-467d-8ba9-7b62e88992c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.464087] env[68244]: DEBUG nova.network.neutron [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.530747] env[68244]: DEBUG oslo_vmware.api [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780272, 'name': PowerOnVM_Task, 'duration_secs': 0.453193} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.534535] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.534915] env[68244]: DEBUG nova.compute.manager [None req-57ab482c-f678-4f5f-9d19-e10b4aff8366 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.536338] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fb102c-58fc-44eb-a222-ce60f034d476 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.573987] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780273, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.651947] env[68244]: DEBUG oslo_vmware.api [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780274, 'name': PowerOnVM_Task, 'duration_secs': 0.500375} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.651947] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.651947] env[68244]: INFO nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Took 10.45 seconds to spawn the instance on the hypervisor. 
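Editor's note: the recurring wait_for_task / _poll_task pairs above (oslo_vmware/api.py:397 and :434) are the VMware driver polling vCenter tasks such as PowerOffVM_Task, ReconfigVM_Task and PowerOnVM_Task until they report success. A minimal sketch of that pattern using the public oslo.vmware API follows; the host, credentials and vm_ref are placeholders, and this is an illustration of the polling pattern, not the exact Nova code path.

    # Sketch only: start a vCenter *_Task and poll it to completion, as the
    # "Waiting for the task" / "progress is N%" entries above show.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',        # vCenter host (placeholder)
        'user', 'secret',         # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)   # roughly the poll cadence visible above

    def power_off(vm_ref):
        # Kick off the asynchronous vCenter task...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ...then block until it finishes; progress is logged while polling.
        return session.wait_for_task(task)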
[ 794.651947] env[68244]: DEBUG nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.652373] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77e25af-fbc3-480d-b7fb-1b7dec08a80c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.705023] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 794.705288] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.705448] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 794.705977] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.705977] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 794.705977] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 794.706190] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 794.706283] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 794.706451] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 794.706662] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 794.706898] env[68244]: DEBUG nova.virt.hardware [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 794.707832] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219d00ea-1a87-464f-a061-5076f0a5e201 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.724960] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03092988-bdc4-401d-a667-3cf9241dbe77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.728690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6555d7fd-37ba-4811-90d6-796a45413853 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.749363] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:31:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d483b86-624f-47ef-844a-5e5c7bf1d4ad', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.756764] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.760717] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.762200] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-730e372e-249e-45ec-b97d-f7a2ef44718f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.779206] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec40e19-5ac4-4a3c-8535-a4b4bc3eea2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.799847] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.799847] env[68244]: value = "task-2780275" [ 794.799847] env[68244]: _type = "Task" [ 794.799847] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.823939] env[68244]: DEBUG nova.compute.manager [req-ee4a4327-a3f5-4ab3-83f7-25577eb47d6a req-c5e35136-9603-4d02-b4c7-0a4da6fb9449 service nova] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Detach interface failed, port_id=57f7dda3-98ee-46c7-871d-37b0add34372, reason: Instance 3776b39a-d10b-4068-8b4b-5dc25798e088 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 794.829848] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780275, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.955274] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b481e8-a9e3-4442-acce-4760cdd97c6e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.961287] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e476589-8fe0-43cb-900a-fa74b8f7d16e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.966892] env[68244]: INFO nova.compute.manager [-] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Took 1.80 seconds to deallocate network for instance. 
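Editor's note: the "Waiting for function ... to return" entries (oslo_service loopingcall) show blocking steps such as vm_util.create_vm and the network-deallocation retries being driven through oslo.service's looping-call machinery. A rough, self-contained illustration of that primitive is below; the three-attempt condition is made up, and the exact looping-call variant Nova uses here is not claimed.

    # Illustrative oslo.service looping call: poll until the wrapped function
    # raises LoopingCallDone, then hand its retvalue back to the waiter.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def check_done():
        # Placeholder condition: pretend the work finishes on the third poll.
        attempts['n'] += 1
        if attempts['n'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue='finished')

    timer = loopingcall.FixedIntervalLoopingCall(check_done)
    result = timer.start(interval=1.0).wait()   # blocks until LoopingCallDone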
[ 794.998897] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Successfully updated port: ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.000527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5ea197-c3f3-4f88-a8f8-39884acb5279 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.003796] env[68244]: DEBUG nova.compute.manager [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received event network-vif-plugged-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 795.004433] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] Acquiring lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.004433] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] Lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.004433] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] Lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.004625] env[68244]: DEBUG nova.compute.manager [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] No waiting events found dispatching network-vif-plugged-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 795.004690] env[68244]: WARNING nova.compute.manager [req-ee37bd4f-216d-416b-a48e-46f4599f50af req-925c32fb-4030-4731-8aa5-376eaa6116af service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received unexpected event network-vif-plugged-ea8b9298-593f-4aec-a795-9d1e6163a01f for instance with vm_state building and task_state spawning. 
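Editor's note: the "Acquiring lock ... / acquired ... waited / released ... held" lines (lockutils.py:405/410/424) come from oslo.concurrency's synchronized wrapper, which serializes sections like the per-instance event handling above and reports wait and hold times. A small sketch of that primitive follows; the lock name and function are illustrative, not Nova's.

    # Sketch of the oslo.concurrency locking primitive behind the lock
    # acquire/release trace lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-events')
    def pop_event():
        # Only one caller at a time runs this body; the decorator's inner
        # wrapper emits the DEBUG wait/held messages seen in the log.
        return 'handled'

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('example-instance-events'):
        pass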
[ 795.011719] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7dc516-1d22-485d-a039-23e82338f49f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.032546] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.074059] env[68244]: DEBUG oslo_vmware.api [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780273, 'name': PowerOnVM_Task, 'duration_secs': 0.567287} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.074460] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 795.074525] env[68244]: INFO nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Took 8.09 seconds to spawn the instance on the hypervisor. [ 795.074688] env[68244]: DEBUG nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 795.075725] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8bc47c-f18a-48f6-a0d1-c7145dc8f216 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.186500] env[68244]: INFO nova.compute.manager [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Took 42.35 seconds to build instance. [ 795.312567] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780275, 'name': CreateVM_Task, 'duration_secs': 0.383093} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.312743] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.313452] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.313619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.313935] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.314260] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24e84599-fc78-48b2-827e-f6336677d2dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.321645] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 795.321645] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5275c3b2-4b09-4198-bd58-9dea6b89594b" [ 795.321645] env[68244]: _type = "Task" [ 795.321645] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.326120] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.326520] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d255fca-5116-4473-8628-2d0c5207ac1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.335039] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5275c3b2-4b09-4198-bd58-9dea6b89594b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.336500] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 795.336500] env[68244]: value = "task-2780276" [ 795.336500] env[68244]: _type = "Task" [ 795.336500] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.348155] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780276, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.506499] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.506853] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.509290] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.509496] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.560359] env[68244]: ERROR nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [req-93fdd491-90b7-4d0c-87ea-50a9674f3d15] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-93fdd491-90b7-4d0c-87ea-50a9674f3d15"}]} [ 795.587663] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 795.600947] env[68244]: INFO nova.compute.manager [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Took 38.48 seconds to build instance. [ 795.615525] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 795.615525] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.641034] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 795.662625] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 795.691350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a79640ff-34df-422c-8a45-148965b159e7 tempest-ServersAdminTestJSON-150758922 
tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.054s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.834253] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5275c3b2-4b09-4198-bd58-9dea6b89594b, 'name': SearchDatastore_Task, 'duration_secs': 0.022837} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.834561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.834815] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.835137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.835237] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.835438] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.838931] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bcfdb0e-49d9-4549-ab4b-a7e10ee60256 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.855467] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780276, 'name': PowerOffVM_Task, 'duration_secs': 0.380153} completed successfully. 
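
The 409 "placement.concurrent_update" error and the subsequent "Refreshing inventories / aggregate associations / trait associations" records above show Placement's optimistic-concurrency check: an inventory PUT carries the resource provider generation, a stale generation is rejected with 409, and the client refreshes its view and retries. A rough sketch of that retry loop follows, written against the Placement HTTP API with the requests library; the endpoint, headers and retry policy are placeholders for illustration, not Nova's report client.

import requests

PLACEMENT = "http://placement.example.test/placement"   # illustrative endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.39"}    # plus auth headers in practice

def put_inventories(rp_uuid, inventories, session=requests, retries=3):
    for _ in range(retries):
        # refresh the provider generation (what the "Refreshing inventories" records do)
        rp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                         headers=HEADERS).json()
        body = {
            "resource_provider_generation": rp["generation"],
            "inventories": inventories,
        }
        resp = session.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                           json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # generation raced with another writer (e.g. a concurrent periodic update); retry
    raise RuntimeError(f"gave up updating inventories for {rp_uuid} after {retries} conflicts")
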
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.862323] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.863029] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.864740] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.864740] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.865389] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd7669f-ca83-4257-991c-ec2a74b461ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.869503] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cf75d8-bfce-4924-8f78-053adddf4c27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.883179] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 795.883179] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c08b57-29e4-9536-95b7-647399b87db6" [ 795.883179] env[68244]: _type = "Task" [ 795.883179] env[68244]: } to complete. 
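
The MakeDirectory call and the "Folder [datastore2] devstack-image-cache_base created." record above correspond to a create-folder-if-missing step for the image cache. A minimal sketch of that idea, assuming "already exists" is treated as success so concurrent spawns do not race; mkdir() and FileAlreadyExists are illustrative stand-ins for the driver's datastore helpers.

class FileAlreadyExists(Exception):
    pass

def create_folder_if_missing(datastore, path, mkdir):
    full_path = f"[{datastore}] {path}"          # e.g. "[datastore2] devstack-image-cache_base"
    try:
        mkdir(full_path)                          # one FileManager.MakeDirectory round trip
        print(f"Folder {full_path} created.")
    except FileAlreadyExists:
        # another worker won the race; the cache directory is usable either way
        print(f"Folder {full_path} already exists.")

if __name__ == "__main__":
    seen = set()
    def fake_mkdir(p):
        if p in seen:
            raise FileAlreadyExists(p)
        seen.add(p)
    create_folder_if_missing("datastore2", "devstack-image-cache_base", fake_mkdir)
    create_folder_if_missing("datastore2", "devstack-image-cache_base", fake_mkdir)
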
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.886533] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.889921] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac874df2-1d76-4567-93bc-2e824dfb76a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.898867] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c08b57-29e4-9536-95b7-647399b87db6, 'name': SearchDatastore_Task, 'duration_secs': 0.01884} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.899679] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85333f94-15df-4233-bf40-8c9baa9eb5d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.909347] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 795.909347] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a84f5b-2cc8-3c67-45d9-9cdc3d06f978" [ 795.909347] env[68244]: _type = "Task" [ 795.909347] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.918953] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a84f5b-2cc8-3c67-45d9-9cdc3d06f978, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.919941] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.920239] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.920317] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleting the datastore file [datastore1] f113bb6c-f05a-4253-98af-ca827fcbb723 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.921273] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4a5a6ed-6197-42d9-9ed7-92a93aaa4712 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.931729] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 795.931729] env[68244]: value = "task-2780278" [ 795.931729] env[68244]: _type = "Task" [ 795.931729] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.942894] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.086788] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.106138] env[68244]: DEBUG oslo_concurrency.lockutils [None req-58fa3127-19af-4bf5-afb2-34485fc11282 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.792s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.194535] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Starting instance... 
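
The f113bb6c-... records above walk the teardown order used here: power off the VM, unregister it from vCenter, then delete the instance directory from the datastore. The sketch below only fixes that ordering; the injected callables stand in for the real power-off/unregister/delete helpers, and error handling is omitted.

def destroy_instance(instance_name, datastore, power_off, unregister, delete_dir):
    steps = [
        ("Powering off the VM", lambda: power_off(instance_name)),
        ("Unregistering the VM", lambda: unregister(instance_name)),
        (f"Deleting the datastore file [{datastore}] {instance_name}",
         lambda: delete_dir(f"[{datastore}] {instance_name}")),
    ]
    for label, step in steps:
        print(label)
        step()
    print("Instance destroyed")

if __name__ == "__main__":
    noop = lambda *_: None
    destroy_instance("f113bb6c-f05a-4253-98af-ca827fcbb723", "datastore1", noop, noop, noop)
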
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.211345] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.211661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.211865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.212059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.212820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.214743] env[68244]: INFO nova.compute.manager [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Terminating instance [ 796.361170] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5105297e-b375-4bbf-8edd-665e9d083c58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.370122] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139a6876-8ccf-4220-88dc-63bffb44bdce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.405620] env[68244]: DEBUG nova.network.neutron [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updating 
instance_info_cache with network_info: [{"id": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "address": "fa:16:3e:60:2d:09", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b9298-59", "ovs_interfaceid": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.407349] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c184e3f2-926d-4817-8afa-f0b7924eeef9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.422517] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223c4b0e-61f2-4ddb-9c7a-0ef207110a09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.426761] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a84f5b-2cc8-3c67-45d9-9cdc3d06f978, 'name': SearchDatastore_Task, 'duration_secs': 0.023966} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.427094] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.427321] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.427921] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a01e6e4-537c-4754-bc82-02b4c6552333 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.440922] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.452899] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 796.452899] env[68244]: value = "task-2780279" [ 796.452899] env[68244]: _type = "Task" [ 796.452899] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.460715] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188241} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.461346] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.461526] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 796.461695] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.467579] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.612018] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.720188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.720511] env[68244]: DEBUG nova.compute.manager [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.720919] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.721616] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe6b858-6eb3-4502-ba0a-195f4fe3844b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.731277] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.731631] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-717b1734-43f0-4550-84a4-d00ebe8a3aba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.742489] env[68244]: DEBUG oslo_vmware.api [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 796.742489] env[68244]: value = "task-2780280" [ 796.742489] env[68244]: _type = "Task" [ 796.742489] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.754436] env[68244]: DEBUG oslo_vmware.api [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2780280, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.913742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.913742] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Instance network_info: |[{"id": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "address": "fa:16:3e:60:2d:09", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b9298-59", "ovs_interfaceid": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 796.913742] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:2d:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c405e9f-a6c8-4308-acac-071654efe18e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea8b9298-593f-4aec-a795-9d1e6163a01f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.921615] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
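
The "Instance VIF info" record just above is derived from the network_info blob logged a few records earlier. A reduced sketch of that mapping for the NSX opaque-network case shown here; the bridge name, vif_model and the use-external-id flag are taken as given rather than looked up, so this is not the driver's full implementation.

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    vifs = []
    for vif in network_info:
        details = vif.get("details", {})
        vifs.append({
            "network_name": vif["network"]["bridge"],                    # "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return vifs

# For the f5724973-... port above this yields network_name='br-int',
# network-id='3c405e9f-a6c8-4308-acac-071654efe18e' and vif_model='vmxnet3',
# matching the logged VIF info.
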
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.921976] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.922331] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b89688f-a43b-4bdd-9969-4b46049b2b0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.952025] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.952025] env[68244]: value = "task-2780281" [ 796.952025] env[68244]: _type = "Task" [ 796.952025] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.974636] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780279, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.979171] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780281, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.980613] env[68244]: ERROR nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [req-4504a138-5ab7-4e27-b178-f98e82668f85] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4504a138-5ab7-4e27-b178-f98e82668f85"}]} [ 797.003462] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 797.024398] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 797.025191] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.043843] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 797.077627] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 797.140779] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.255100] env[68244]: DEBUG oslo_vmware.api [None 
req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2780280, 'name': PowerOffVM_Task, 'duration_secs': 0.396161} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.261247] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.261247] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.261247] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ef0619f-55f0-4fcd-90ff-eac62fa182d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.283040] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "df4674a2-87de-4507-950a-5941fae93aab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.284429] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.337806] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.338131] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.338351] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Deleting the datastore file [datastore2] 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.338634] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac66e029-55f6-4175-830c-2d1f4d83d385 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.349958] env[68244]: DEBUG oslo_vmware.api [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for the task: (returnval){ [ 797.349958] env[68244]: value = "task-2780283" [ 797.349958] env[68244]: _type = "Task" [ 797.349958] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.363328] env[68244]: DEBUG oslo_vmware.api [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2780283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.434028] env[68244]: DEBUG nova.compute.manager [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 797.434270] env[68244]: DEBUG nova.compute.manager [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing instance network info cache due to event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 797.434873] env[68244]: DEBUG oslo_concurrency.lockutils [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] Acquiring lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.434873] env[68244]: DEBUG oslo_concurrency.lockutils [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] Acquired lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.434873] env[68244]: DEBUG nova.network.neutron [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.469229] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780281, 'name': CreateVM_Task, 'duration_secs': 0.434807} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.471862] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.477517] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.477715] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.478073] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 797.478786] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62257} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.481676] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fe97f4d-30f8-41a4-bd07-c020011c7af7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.486854] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.487127] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.488077] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-199d4744-478b-4383-a89d-f3308463ef47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.493769] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 797.493769] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528a715f-fc63-76e7-4424-60ba80afa62e" [ 797.493769] env[68244]: _type = "Task" [ 797.493769] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.498739] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 797.498739] env[68244]: value = "task-2780284" [ 797.498739] env[68244]: _type = "Task" [ 797.498739] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.511023] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528a715f-fc63-76e7-4424-60ba80afa62e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.515742] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780284, 'name': ExtendVirtualDisk_Task} progress is 0%. 
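
The disk handling visible in the c9f5fbeb-... records is: copy the cached image VMDK from devstack-image-cache_base into the instance directory, then extend the root disk to the flavor's root size (logged in KiB, so a 1 GiB root disk appears as 1048576). The sketch below is only the path and size arithmetic under those assumptions; the actual work is done by the CopyVirtualDisk_Task and ExtendVirtualDisk_Task calls in the log.

def root_disk_plan(datastore, image_id, instance_uuid, root_gb):
    cache_dir = "devstack-image-cache_base"
    source = f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"
    dest = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    target_kb = root_gb * 1024 * 1024            # matches "Extending root virtual disk to 1048576"
    return source, dest, target_kb

src, dst, kb = root_disk_plan(
    "datastore2",
    "9aa0b4d1-af1b-4141-9ca6-95525b722d7e",
    "c9f5fbeb-28b6-4b41-9156-5b90bc19977c",
    root_gb=1,
)
assert kb == 1048576
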
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.525169] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.525169] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.525169] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.525325] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.525590] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.525590] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.526455] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.526455] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.526455] env[68244]: DEBUG 
nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.526455] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.526455] env[68244]: DEBUG nova.virt.hardware [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.530634] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64471c44-50a7-4f42-9c51-2e128b4087ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.542606] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a07645-0fa2-4b73-8086-ed75c95833a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.560373] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.566440] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.569415] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.569887] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df0242b1-977f-435d-9cdc-7a8e70935f01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.591485] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.591485] env[68244]: value = "task-2780285" [ 797.591485] env[68244]: _type = "Task" [ 797.591485] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.600158] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780285, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.777405] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c5d5cd-d900-4ef2-a665-69948bece06a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.789578] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281641ee-593b-435e-b6f9-ec3487c3d41b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.821416] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be52aad8-2d71-498e-88e2-959c19b91fa7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.830690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c9dd37-fa1f-42ee-8d4e-50f3f01d5c3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.845876] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.860642] env[68244]: DEBUG oslo_vmware.api [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Task: {'id': task-2780283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39458} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.860642] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.860787] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.860905] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.861098] env[68244]: INFO nova.compute.manager [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 797.861361] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.861559] env[68244]: DEBUG nova.compute.manager [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.861649] env[68244]: DEBUG nova.network.neutron [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 798.013830] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188483} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.017812] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.018269] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528a715f-fc63-76e7-4424-60ba80afa62e, 'name': SearchDatastore_Task, 'duration_secs': 0.02331} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.019036] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d48ccc-53d8-41d6-82a5-130489293ea6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.022101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.022387] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.022682] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.022864] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.023087] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.023383] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3921ac7f-4bfd-4fd9-82df-d73e3d96478b {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.057677] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.062509] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e334097-a915-4feb-9f64-6729a643b0ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.080433] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.080720] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.087535] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10a40936-ed9e-417f-b594-03e08b0e200c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.091839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.092642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.100570] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 798.100570] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b8e4c2-ac25-fb1f-bc52-5815c8085e61" [ 798.100570] env[68244]: _type = "Task" [ 798.100570] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.101197] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 798.101197] env[68244]: value = "task-2780286" [ 798.101197] env[68244]: _type = "Task" [ 798.101197] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.110486] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780285, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.116284] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b8e4c2-ac25-fb1f-bc52-5815c8085e61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.120286] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.235587] env[68244]: DEBUG nova.network.neutron [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updated VIF entry in instance network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.235587] env[68244]: DEBUG nova.network.neutron [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updating instance_info_cache with network_info: [{"id": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "address": "fa:16:3e:60:2d:09", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b9298-59", "ovs_interfaceid": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.373792] env[68244]: ERROR nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [req-7b75bfc0-6dcc-4320-827e-0a41d9503566] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b75bfc0-6dcc-4320-827e-0a41d9503566"}]} [ 798.400307] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 798.417398] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 798.420090] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.433139] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 798.454987] env[68244]: DEBUG nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 798.605817] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780285, 'name': CreateVM_Task, 'duration_secs': 0.54806} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.614146] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.617837] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.618039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.618361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.620823] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf6e55f0-4e40-4b9d-a8ac-620d9d8c3d86 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.625164] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b8e4c2-ac25-fb1f-bc52-5815c8085e61, 'name': SearchDatastore_Task, 'duration_secs': 0.024464} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.631764] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780286, 'name': ReconfigVM_Task, 'duration_secs': 0.312699} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.632640] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 798.632640] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dbfbcd-751d-2153-0727-226be78b0602" [ 798.632640] env[68244]: _type = "Task" [ 798.632640] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.632640] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-314df933-133d-41ea-9291-936aaf604fcc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.634698] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Reconfigured VM instance instance-00000023 to attach disk [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c/c9f5fbeb-28b6-4b41-9156-5b90bc19977c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.635463] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9752b2ff-0782-46a4-8136-a44141decc3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.645976] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 798.645976] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bd658-6270-59e8-5c0c-99a8dbd77e46" [ 798.645976] env[68244]: _type = "Task" [ 798.645976] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.649825] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 798.649825] env[68244]: value = "task-2780287" [ 798.649825] env[68244]: _type = "Task" [ 798.649825] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.650048] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dbfbcd-751d-2153-0727-226be78b0602, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.662424] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780287, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.665497] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bd658-6270-59e8-5c0c-99a8dbd77e46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.737609] env[68244]: DEBUG oslo_concurrency.lockutils [req-4b1c65b3-3ef4-484d-84d4-34392c7e46f2 req-ac27937d-5c31-483e-913c-3b52baf980cf service nova] Releasing lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.092947] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a158123b-bf2f-4019-bc56-2398108a46f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.101958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cd7709-7051-4269-809a-9724dc57c4b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.135273] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61d954b-c0d4-48dd-841e-85d74628fa0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.150060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cd76f6-2f3e-4c52-8a86-c15247708805 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.153964] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52dbfbcd-751d-2153-0727-226be78b0602, 'name': SearchDatastore_Task, 'duration_secs': 0.038092} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.154384] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.154614] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.154840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.167359] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bd658-6270-59e8-5c0c-99a8dbd77e46, 'name': SearchDatastore_Task, 'duration_secs': 0.037157} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.181523] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.181523] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f5724973-2349-481c-b2ba-d1287f09c1db/f5724973-2349-481c-b2ba-d1287f09c1db.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.181523] env[68244]: DEBUG nova.compute.provider_tree [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 799.181523] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780287, 'name': Rename_Task, 'duration_secs': 0.155061} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.182067] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.182067] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.182247] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f209e15-daad-48ec-aad6-f6b6e6ca4298 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.188767] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.189184] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55b5e4e0-e742-4b01-9acd-4c8c98c4f16b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.190875] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88017741-6515-4e23-85f2-6575241aaf27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.199673] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 799.199673] env[68244]: value = "task-2780289" [ 799.199673] env[68244]: _type = "Task" [ 799.199673] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.201117] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 799.201117] env[68244]: value = "task-2780288" [ 799.201117] env[68244]: _type = "Task" [ 799.201117] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.205167] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.205167] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.209472] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c92c8b4-b5d1-45af-b479-84e8e1dff26a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.219656] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.222328] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 799.222328] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa9e56-9583-91f8-921d-3c94f74d41d0" [ 799.222328] env[68244]: _type = "Task" [ 799.222328] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.222544] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780288, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.231175] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa9e56-9583-91f8-921d-3c94f74d41d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.397254] env[68244]: DEBUG nova.network.neutron [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.727662] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780288, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.731619] env[68244]: ERROR nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 799.732282] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 5.989s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.735302] env[68244]: ERROR nova.compute.manager [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Confirm resize failed on source host cpu-1. Resource allocations in the placement service will be removed regardless because the instance is now on the destination host cpu-1. 
You can try hard rebooting the instance to correct its state.: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Traceback (most recent call last): [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 5279, in do_confirm_resize [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._confirm_resize( [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 5364, in _confirm_resize [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.rt.drop_move_claim_at_source(context, instance, migration) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] return f(*args, **kwargs) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 606, in drop_move_claim_at_source [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._drop_move_claim( [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 677, in _drop_move_claim [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._update(ctxt, self.compute_nodes[nodename]) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._update_to_placement(context, compute_node, startup) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise attempt.get() [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 
10957648-8618-4f2c-8b08-5468bca20cfc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise value [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.reportclient.update_from_provider_tree( [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.set_inventory_for_provider( [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise exception.ResourceProviderUpdateConflict( [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 799.735302] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] [ 799.741457] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780289, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.741457] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.371s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.741457] env[68244]: DEBUG nova.objects.instance [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lazy-loading 'resources' on Instance uuid 09ab8712-0f7a-4122-9d61-19da3e65d22b {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.750105] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa9e56-9583-91f8-921d-3c94f74d41d0, 'name': SearchDatastore_Task, 'duration_secs': 0.016023} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.752053] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8a3c79b-33e2-4845-97c6-6638b7d416db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.761955] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 799.761955] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b28d58-da9c-8f6d-e228-172ef91f7972" [ 799.761955] env[68244]: _type = "Task" [ 799.761955] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.771705] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b28d58-da9c-8f6d-e228-172ef91f7972, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.787985] env[68244]: DEBUG nova.compute.manager [req-2b87b9c7-22a3-49b5-9dff-ac232a83e348 req-d241ef9c-8dbc-4032-83b0-0b4a832d669a service nova] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Received event network-vif-deleted-a07f522b-44ee-4a87-ac21-b5407bf48ff2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 799.807571] env[68244]: INFO nova.scheduler.client.report [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleted allocation for migration 708441f4-9a09-4c99-bfc8-42d73de28a7f [ 799.899146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.900028] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.901294] env[68244]: INFO nova.compute.manager [-] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Took 2.04 seconds to deallocate network for instance. [ 800.214453] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780289, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558646} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.214736] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f5724973-2349-481c-b2ba-d1287f09c1db/f5724973-2349-481c-b2ba-d1287f09c1db.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.214955] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.215220] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8be5f453-c27e-43ab-bf71-347bc4b33151 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.229352] env[68244]: DEBUG oslo_vmware.api [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780288, 'name': PowerOnVM_Task, 'duration_secs': 0.671925} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.230323] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.230570] env[68244]: DEBUG nova.compute.manager [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.230979] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 800.230979] env[68244]: value = "task-2780290" [ 800.230979] env[68244]: _type = "Task" [ 800.230979] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.232273] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76461e26-10f3-4a5d-b721-c03395f99ca9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.243573] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780290, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.267832] env[68244]: DEBUG nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 800.276756] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b28d58-da9c-8f6d-e228-172ef91f7972, 'name': SearchDatastore_Task, 'duration_secs': 0.020402} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.277080] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.277384] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 800.277613] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6de24e0-08ee-4e5c-aacb-50558838e90f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.283135] env[68244]: DEBUG nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 800.283321] env[68244]: DEBUG nova.compute.provider_tree [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.291095] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 800.291095] env[68244]: value = "task-2780291" [ 800.291095] env[68244]: _type = "Task" [ 800.291095] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.295821] env[68244]: DEBUG nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 800.303734] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.314992] env[68244]: ERROR nova.compute.manager [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Setting instance vm_state to ERROR: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Traceback (most recent call last): [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 11390, in _error_out_instance_on_exception [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] yield [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 5285, in do_confirm_resize [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] with excutils.save_and_reraise_exception(logger=LOG): [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.force_reraise() [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 
10957648-8618-4f2c-8b08-5468bca20cfc] raise self.value [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 5279, in do_confirm_resize [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._confirm_resize( [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/manager.py", line 5364, in _confirm_resize [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.rt.drop_move_claim_at_source(context, instance, migration) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] return f(*args, **kwargs) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 606, in drop_move_claim_at_source [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._drop_move_claim( [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 677, in _drop_move_claim [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._update(ctxt, self.compute_nodes[nodename]) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self._update_to_placement(context, compute_node, startup) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise attempt.get() [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise value [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 
800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.reportclient.update_from_provider_tree( [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 800.314992] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] self.set_inventory_for_provider( [ 800.316012] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 800.316012] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] raise exception.ResourceProviderUpdateConflict( [ 800.316012] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 800.316012] env[68244]: ERROR nova.compute.manager [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] [ 800.318270] env[68244]: DEBUG nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 800.411526] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.743934] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077234} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.744846] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.745205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8d4ad6-8d89-4498-b016-067452ddcb72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.776035] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] f5724973-2349-481c-b2ba-d1287f09c1db/f5724973-2349-481c-b2ba-d1287f09c1db.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.779839] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c618ad3b-3eef-49ba-b2ee-74b49ab95261 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.794840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.805086] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.807261] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 800.807261] env[68244]: value = "task-2780292" [ 800.807261] env[68244]: _type = "Task" [ 800.807261] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.807957] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bedc068-9a6e-4da8-99ee-25a424d57872 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.823425] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae40c0b-3a29-4f3b-9642-f52c7f5eeffc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.827308] env[68244]: DEBUG oslo_concurrency.lockutils [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 41.297s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.828885] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780292, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.866034] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9589124-4493-4309-b936-9419e31f793d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.874548] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6666c4a9-eafc-4ef3-8d94-3599c813e8ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.892448] env[68244]: DEBUG nova.compute.provider_tree [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.307841] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780291, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.322849] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780292, 'name': ReconfigVM_Task, 'duration_secs': 0.297396} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.323224] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Reconfigured VM instance instance-00000027 to attach disk [datastore2] f5724973-2349-481c-b2ba-d1287f09c1db/f5724973-2349-481c-b2ba-d1287f09c1db.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.324283] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfab0429-0b16-4654-aa5d-b895b0a808b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.335981] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 801.335981] env[68244]: value = "task-2780293" [ 801.335981] env[68244]: _type = "Task" [ 801.335981] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.346439] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780293, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.427173] env[68244]: DEBUG nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 801.427173] env[68244]: DEBUG nova.compute.provider_tree [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 71 to 72 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 801.427173] env[68244]: DEBUG nova.compute.provider_tree [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.806126] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780291, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.065325} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.806585] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.806585] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.806850] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5a26330-ff74-4244-ba26-c1c12bfde0cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.814912] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 801.814912] env[68244]: value = "task-2780294" [ 801.814912] env[68244]: _type = "Task" [ 801.814912] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.824645] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.848346] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780293, 'name': Rename_Task, 'duration_secs': 0.163178} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.848346] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.848346] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-107db5bc-35ed-403f-9c32-2f489ff99fa4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.856106] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 801.856106] env[68244]: value = "task-2780295" [ 801.856106] env[68244]: _type = "Task" [ 801.856106] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.864700] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.932367] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.934949] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.475s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.935252] env[68244]: DEBUG nova.objects.instance [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lazy-loading 'resources' on Instance uuid b0090ea8-98fe-42a0-97cc-40d7578851a9 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.965819] env[68244]: INFO nova.scheduler.client.report [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleted allocations for instance 09ab8712-0f7a-4122-9d61-19da3e65d22b [ 802.060066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
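
The ERROR entries above, together with the later inventory update that bumps provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from generation 71 to 72, show placement's optimistic-concurrency scheme in action: every resource provider carries a generation, the writer sends the generation it last saw with its inventory PUT, and placement rejects the write with HTTP 409 / "placement.concurrent_update" when another writer has already bumped it, which is exactly what produced the ResourceProviderUpdateConflict at generation 69. A minimal, hedged sketch of the refresh-and-retry pattern this implies follows; the endpoint shape matches the placement REST API, but PLACEMENT_URL, the auth header value, the retry budget and the helper name are illustrative assumptions, not Nova's actual report-client code.

    # Hedged sketch of the refresh-and-retry pattern implied by the 409
    # "placement.concurrent_update" error above. PLACEMENT_URL, the token
    # and the retry budget are assumptions for illustration only.
    import requests

    PLACEMENT_URL = "http://placement.example:8778"      # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<token>",                        # assumed auth
        "OpenStack-API-Version": "placement 1.26",
    }

    def set_inventory(session: requests.Session, rp_uuid: str,
                      inventories: dict, retries: int = 3) -> dict:
        """Write inventory for a resource provider, retrying on
        generation conflicts by re-reading the current generation."""
        for _ in range(retries):
            # Re-read the provider to learn the generation placement expects.
            rp = session.get(
                f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                headers=HEADERS).json()
            payload = {
                "resource_provider_generation": rp["generation"],
                "inventories": inventories,
            }
            resp = session.put(
                f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first (as in the traceback above), so loop and
            # retry with the refreshed value.
        raise RuntimeError(f"generation conflict persisted for {rp_uuid}")

The point of the sketch is only that a stale generation is never repaired locally: the caller must re-read the provider and resubmit. In the log the same recovery is visible one layer up, where the retry wrapper around _update_to_placement re-raises after exhausting its attempts at generation 69, while a separate request later refreshes the provider inventory and succeeds once the generation has advanced from 71 to 72.
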
[ 802.060595] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.063033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.063033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.063033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.066249] env[68244]: INFO nova.compute.manager [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Terminating instance [ 802.331266] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071334} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.331266] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.332988] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439f4702-d87a-4156-b61f-6867128ef9e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.370945] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server [None req-566968e5-0faf-4c55-8ea7-9dac42d51bfe tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Exception during message handling: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server raise 
self.value [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 147, in decorated_function [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server raise self.value [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 5308, in confirm_resize [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server do_confirm_resize(context, instance, migration) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 5285, in do_confirm_resize [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(logger=LOG): [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server raise self.value [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 5279, in do_confirm_resize [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self._confirm_resize( [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 5364, in _confirm_resize [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self.rt.drop_move_claim_at_source(context, instance, migration) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 
802.372432] env[68244]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 606, in drop_move_claim_at_source [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self._drop_move_claim( [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 677, in _drop_move_claim [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self._update(ctxt, self.compute_nodes[nodename]) [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 802.372432] env[68244]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server raise attempt.get() [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server raise value [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateConflict( [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 (generation 69): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e60ec37-72fb-4552-b932-1acc5b4fc3cf"}]} [ 802.373829] env[68244]: ERROR oslo_messaging.rpc.server [ 802.373829] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d94fd587-28dd-4a2b-b765-b8a74fdce004 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.399344] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780295, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.399780] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 802.399780] env[68244]: value = "task-2780296" [ 802.399780] env[68244]: _type = "Task" [ 802.399780] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.409591] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780296, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.475108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79a88928-bc76-4ada-85e2-8eeca1ba044a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "09ab8712-0f7a-4122-9d61-19da3e65d22b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.240s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.576018] env[68244]: DEBUG nova.compute.manager [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.576018] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.576018] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6562a9-0a16-4757-83e6-14d6fd5f682c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.587213] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.587509] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-815ffa78-3179-43da-997d-304dcdfe897a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.595229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.595536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.598915] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 802.598915] env[68244]: value = "task-2780297" [ 802.598915] env[68244]: _type = "Task" [ 802.598915] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.612223] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780297, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.621405] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "d74a0d56-8656-429c-a703-fca87e07798f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.624019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.883384] env[68244]: DEBUG oslo_vmware.api [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780295, 'name': PowerOnVM_Task, 'duration_secs': 0.797893} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.885797] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.886016] env[68244]: INFO nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Took 9.51 seconds to spawn the instance on the hypervisor. [ 802.886349] env[68244]: DEBUG nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 802.887251] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5209ec65-91d2-4fb5-b907-8755a160ef5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.909599] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780296, 'name': ReconfigVM_Task, 'duration_secs': 0.377395} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.912198] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Reconfigured VM instance instance-00000024 to attach disk [datastore2] f113bb6c-f05a-4253-98af-ca827fcbb723/f113bb6c-f05a-4253-98af-ca827fcbb723.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.913074] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b14ad0b0-0133-4ec0-837f-843c765b975b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.920815] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 802.920815] env[68244]: value = "task-2780298" [ 802.920815] env[68244]: _type = "Task" [ 802.920815] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.935586] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780298, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.003136] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.003482] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.003729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.003918] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.004118] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.006621] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9767927d-7736-4c1e-902d-1cb92aac3eaf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.009638] env[68244]: INFO nova.compute.manager [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Terminating instance [ 803.017734] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6195b4b-2f0a-46ae-9dc0-d5b0e89652ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.054402] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f4aca9-b656-4f0a-8914-6c5416c53c75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.063883] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263c62ab-45ed-491d-9b23-97ef5eeb946b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.080527] env[68244]: DEBUG nova.compute.provider_tree [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.108338] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780297, 'name': PowerOffVM_Task, 'duration_secs': 0.429464} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.108605] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.108770] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.109034] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9418c7cf-55db-4e54-a6a8-781d55c07fdc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.197621] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.197845] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.198084] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] c9f5fbeb-28b6-4b41-9156-5b90bc19977c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.198366] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7927ac54-53ca-45f1-a4af-11c5acab21a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.206127] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 803.206127] env[68244]: value = "task-2780300" [ 803.206127] env[68244]: _type = "Task" [ 803.206127] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.214554] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780300, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.407508] env[68244]: INFO nova.compute.manager [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Took 44.30 seconds to build instance. [ 803.432510] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780298, 'name': Rename_Task, 'duration_secs': 0.149886} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.432793] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.433050] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-001c86f7-3e22-481f-9d1f-28d6cffacaeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.443922] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 803.443922] env[68244]: value = "task-2780301" [ 803.443922] env[68244]: _type = "Task" [ 803.443922] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.459370] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780301, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.515620] env[68244]: DEBUG nova.compute.manager [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 803.515864] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.516824] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84eb959-f19a-47ab-969f-d32fafe46b27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.526085] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 803.526396] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beeb889f-8ddc-4124-b5d2-0b08bace66c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.551694] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 803.551694] env[68244]: value = "task-2780302" [ 803.551694] env[68244]: _type = "Task" [ 803.551694] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.561894] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.584407] env[68244]: DEBUG nova.scheduler.client.report [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.720452] env[68244]: DEBUG oslo_vmware.api [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13946} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.720731] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.720958] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.721175] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.721449] env[68244]: INFO nova.compute.manager [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 803.721611] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.721805] env[68244]: DEBUG nova.compute.manager [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.721899] env[68244]: DEBUG nova.network.neutron [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.907367] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5ad6b5ab-8cd8-4828-a6bd-16d32c9f7c17 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.892s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.954773] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780301, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.063760] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780302, 'name': PowerOffVM_Task, 'duration_secs': 0.328669} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.064209] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 804.064950] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 804.065205] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f32c4a9-7397-404b-b63d-b485a317073f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.090552] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.092954] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 36.810s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.128456] env[68244]: INFO nova.scheduler.client.report [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Deleted allocations for instance b0090ea8-98fe-42a0-97cc-40d7578851a9 [ 804.138904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 804.139047] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 804.139552] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleting the datastore file [datastore2] 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.139781] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2bbf892-0a56-4386-b1b2-447422749392 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.151291] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for the task: (returnval){ [ 804.151291] env[68244]: value = "task-2780304" [ 804.151291] env[68244]: _type = "Task" [ 804.151291] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.162517] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.410653] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 804.458379] env[68244]: DEBUG oslo_vmware.api [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780301, 'name': PowerOnVM_Task, 'duration_secs': 0.788619} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.458706] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.458959] env[68244]: DEBUG nova.compute.manager [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 804.459868] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d57d49d-7c8a-4c10-931b-823a78218fe3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.637996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0d4be0c5-2cbc-48d4-b612-0e0d38d9c2a0 tempest-InstanceActionsTestJSON-1735749390 tempest-InstanceActionsTestJSON-1735749390-project-member] Lock "b0090ea8-98fe-42a0-97cc-40d7578851a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.591s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.662901] env[68244]: DEBUG oslo_vmware.api [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Task: {'id': task-2780304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.329173} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.663313] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.663515] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 804.663697] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 804.664053] env[68244]: INFO nova.compute.manager [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 804.664249] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 804.664583] env[68244]: DEBUG nova.compute.manager [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 804.664583] env[68244]: DEBUG nova.network.neutron [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.755371] env[68244]: DEBUG nova.compute.manager [req-1dcce3e2-0318-42d2-b344-61e4aeccec27 req-0d8bcef1-cd7e-45cb-ac86-1e05afd6edfe service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Received event network-vif-deleted-3d483b86-624f-47ef-844a-5e5c7bf1d4ad {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 804.755577] env[68244]: INFO nova.compute.manager [req-1dcce3e2-0318-42d2-b344-61e4aeccec27 req-0d8bcef1-cd7e-45cb-ac86-1e05afd6edfe service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Neutron deleted interface 3d483b86-624f-47ef-844a-5e5c7bf1d4ad; detaching it from the instance and deleting it from the info cache [ 804.755739] env[68244]: DEBUG nova.network.neutron [req-1dcce3e2-0318-42d2-b344-61e4aeccec27 req-0d8bcef1-cd7e-45cb-ac86-1e05afd6edfe service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.850524] env[68244]: DEBUG nova.network.neutron [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.937141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.978923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.108445] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance 10957648-8618-4f2c-8b08-5468bca20cfc as it has an incoming, in-progress migration 708441f4-9a09-4c99-bfc8-42d73de28a7f. Migration status is error {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 805.110363] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Skipping migration as instance is neither resizing nor live-migrating. 
{{(pid=68244) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 805.135456] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e8655168-1fe8-4590-90a3-2ad9438d7761 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d81bdefa-9c23-413b-9670-bbb2139084f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 805.137944] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 3776b39a-d10b-4068-8b4b-5dc25798e088 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance aa7c6967-cd55-47fc-a2f5-db6e8d2e0307 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2aacd21f-d664-4267-8331-d3862f43d35b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7778c027-d4af-436c-a545-aa513c0b1127 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 6abb889a-2e96-4aba-8e36-c4c8997dd4e2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 086dda59-4bd2-4ca2-a758-c120f1271f42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.137944] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 10957648-8618-4f2c-8b08-5468bca20cfc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.138488] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d1fb6fff-b1b7-4c1b-8995-41628cadf7d5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 805.138488] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance c9f5fbeb-28b6-4b41-9156-5b90bc19977c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.138488] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f113bb6c-f05a-4253-98af-ca827fcbb723 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.138488] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.138974] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f579141b-1fac-4541-99c3-07644a0a358c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.139314] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f5724973-2349-481c-b2ba-d1287f09c1db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 805.261626] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5da61e7c-44a5-4a8c-9c26-b68221793721 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.278477] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f4e90c-1e25-4a67-a4c5-c0619bb04eae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.317650] env[68244]: DEBUG nova.compute.manager [req-1dcce3e2-0318-42d2-b344-61e4aeccec27 req-0d8bcef1-cd7e-45cb-ac86-1e05afd6edfe service nova] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Detach interface failed, port_id=3d483b86-624f-47ef-844a-5e5c7bf1d4ad, reason: Instance c9f5fbeb-28b6-4b41-9156-5b90bc19977c could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 805.353689] env[68244]: INFO nova.compute.manager [-] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Took 1.63 seconds to deallocate network for instance. 
[ 805.556601] env[68244]: DEBUG nova.network.neutron [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.642636] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 100ec1f9-6776-4832-a4c2-e9a4def0d350 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.860407] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.941712] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "f113bb6c-f05a-4253-98af-ca827fcbb723" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.943968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.944238] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "f113bb6c-f05a-4253-98af-ca827fcbb723-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.944435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.944608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.948451] env[68244]: INFO nova.compute.manager [None 
req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Terminating instance [ 806.060430] env[68244]: INFO nova.compute.manager [-] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Took 1.39 seconds to deallocate network for instance. [ 806.147412] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 828865d7-d06a-4683-9149-987e6d9efbd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.454611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "refresh_cache-f113bb6c-f05a-4253-98af-ca827fcbb723" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.454849] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquired lock "refresh_cache-f113bb6c-f05a-4253-98af-ca827fcbb723" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.455071] env[68244]: DEBUG nova.network.neutron [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.565841] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.653124] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 340aa1e7-dc0a-4cba-8979-0c591830e9db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.992482] env[68244]: DEBUG nova.network.neutron [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.076089] env[68244]: DEBUG nova.network.neutron [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.156876] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b0b79f25-f97d-4d59-ae80-2f8c09201073 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.213340] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 807.213340] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing instance network info cache due to event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 807.213340] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.213340] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.213340] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.580739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Releasing lock "refresh_cache-f113bb6c-f05a-4253-98af-ca827fcbb723" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.581187] env[68244]: DEBUG nova.compute.manager [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.581391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.582337] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7ae9ad-4d93-4d90-b9bb-c8f903d10189 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.592871] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.593112] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4304ce1b-635d-4355-9186-a2b62c462fbb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.602221] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 807.602221] env[68244]: value = "task-2780305" [ 807.602221] env[68244]: _type = "Task" [ 807.602221] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.613173] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780305, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.660266] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 085b318d-e704-46f9-89a6-679b8aa49f85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.113747] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780305, 'name': PowerOffVM_Task, 'duration_secs': 0.18866} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.114047] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.114224] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.114479] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33a0b83a-9e05-4c64-a6ac-eea866298601 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.143858] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.144103] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.144289] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleting the datastore file [datastore2] f113bb6c-f05a-4253-98af-ca827fcbb723 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.144553] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-048bb09e-6e2d-45cf-b761-e6a94ed618e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.151660] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for the task: (returnval){ [ 808.151660] env[68244]: value = "task-2780307" [ 808.151660] env[68244]: _type = "Task" [ 808.151660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.156486] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updated VIF entry in instance network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.156946] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.165148] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 6915d271-8346-41b5-a75b-2188fd3b57d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.170027] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780307, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.664933] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.665244] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Received event network-vif-deleted-a8cb98b0-596a-4263-96fc-669e34e6e364 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 808.665426] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 808.665588] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing instance network info cache due to event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 808.665790] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.666686] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.666686] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.667930] env[68244]: DEBUG oslo_vmware.api [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Task: {'id': task-2780307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216023} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.668764] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2d9dbf75-992d-4932-bd5d-84462494ebe8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.669901] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.670107] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.670302] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.671585] env[68244]: INFO nova.compute.manager [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Took 1.09 seconds to destroy the instance on the hypervisor. [ 808.671585] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.671585] env[68244]: DEBUG nova.compute.manager [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.671585] env[68244]: DEBUG nova.network.neutron [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.691939] env[68244]: DEBUG nova.network.neutron [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.172027] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 774ce6f8-6273-4f2b-b398-ee8c44d79520 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.194092] env[68244]: DEBUG nova.network.neutron [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.250939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "f5724973-2349-481c-b2ba-d1287f09c1db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.251227] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.251428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.251608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.251773] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.254848] env[68244]: INFO nova.compute.manager [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Terminating instance [ 809.266164] env[68244]: DEBUG nova.compute.manager [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 809.266412] env[68244]: DEBUG nova.compute.manager [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] [instance: 
f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing instance network info cache due to event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 809.266656] env[68244]: DEBUG oslo_concurrency.lockutils [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] Acquiring lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.266831] env[68244]: DEBUG oslo_concurrency.lockutils [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] Acquired lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.267136] env[68244]: DEBUG nova.network.neutron [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 809.463011] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updated VIF entry in instance network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 809.463392] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.675253] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8c00240d-5124-4ada-bd4d-4acd39a345c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.697837] env[68244]: INFO nova.compute.manager [-] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Took 1.03 seconds to deallocate network for instance. [ 809.760686] env[68244]: DEBUG nova.compute.manager [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 809.762596] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.763182] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8daa7c96-4f8f-4413-bbb0-9aa5897e29ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.776453] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.776879] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4354648-62a2-40ac-b910-0c2eb964beca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.785946] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 809.785946] env[68244]: value = "task-2780308" [ 809.785946] env[68244]: _type = "Task" [ 809.785946] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.797729] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780308, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.965916] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.966223] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 809.966382] env[68244]: DEBUG nova.compute.manager [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing instance network info cache due to event network-changed-ea8b9298-593f-4aec-a795-9d1e6163a01f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 809.966598] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquiring lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.181927] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 184f7694-9cab-4184-a1c0-926763a81baf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.198534] env[68244]: DEBUG nova.network.neutron [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updated VIF entry in instance network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.198906] env[68244]: DEBUG nova.network.neutron [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updating instance_info_cache with network_info: [{"id": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "address": "fa:16:3e:60:2d:09", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b9298-59", "ovs_interfaceid": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.204168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.298646] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780308, 'name': PowerOffVM_Task, 'duration_secs': 0.256811} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.299064] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.299966] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.300774] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c3bfa2a-82b9-41b3-8eab-c3d5ebf4e87c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.374081] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 810.374081] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 810.374081] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleting the datastore file [datastore2] f5724973-2349-481c-b2ba-d1287f09c1db {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.374081] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-378b694c-004e-4ba8-ad6f-ca3fb3e4f6e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.381997] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 810.381997] env[68244]: value = "task-2780310" [ 810.381997] env[68244]: _type = "Task" [ 810.381997] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.392052] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.689798] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 874d6895-0f3d-4a99-b27a-cad627ddeecd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.701485] env[68244]: DEBUG oslo_concurrency.lockutils [req-5e8b417b-f863-40e9-949d-ca59375fb78a req-10b8ba5a-1b76-439a-96e5-2b7dbce2c2e8 service nova] Releasing lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.701996] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Acquired lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.702209] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Refreshing network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.896830] env[68244]: DEBUG oslo_vmware.api [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211286} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.898755] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.899033] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.899231] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.899404] env[68244]: INFO nova.compute.manager [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Took 1.14 seconds to destroy the instance on the hypervisor. 
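The task records above (task-2780308, task-2780310) follow the same shape each time: the API call returns a Task handle, the driver waits on it, progress is reported as 0%, and the record finally shows "completed successfully" with a duration_secs. The snippet below is only an illustrative sketch of that poll-until-complete pattern, not the oslo.vmware implementation; fake_task_state() and the 0.5s interval are invented for the example.

```python
# Minimal sketch of a poll-until-complete loop, mirroring the task records
# above (queued -> "progress is 0%" -> "completed successfully" with a
# duration). NOT the oslo.vmware code; fake_task_state() is a stand-in.
import time


def fake_task_state(started_at, duration=0.2):
    """Pretend vCenter task: 'running' until `duration` seconds have passed."""
    return "success" if time.monotonic() - started_at >= duration else "running"


def wait_for_task(task_id, poll_interval=0.5):
    started = time.monotonic()
    while True:
        state = fake_task_state(started)
        if state == "success":
            # A real driver would also handle 'error' states here.
            return {"id": task_id,
                    "duration_secs": round(time.monotonic() - started, 6)}
        time.sleep(poll_interval)


print(wait_for_task("task-2780308"))
# e.g. {'id': 'task-2780308', 'duration_secs': 0.500123}
```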
[ 810.899645] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.899833] env[68244]: DEBUG nova.compute.manager [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.899924] env[68244]: DEBUG nova.network.neutron [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.193356] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.445037] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updated VIF entry in instance network info cache for port ea8b9298-593f-4aec-a795-9d1e6163a01f. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.445447] env[68244]: DEBUG nova.network.neutron [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updating instance_info_cache with network_info: [{"id": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "address": "fa:16:3e:60:2d:09", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea8b9298-59", "ovs_interfaceid": "ea8b9298-593f-4aec-a795-9d1e6163a01f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.695680] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d46f6695-7a96-4e0b-b43a-236bcb4ec519 has been scheduled to this compute host, the scheduler 
has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.909671] env[68244]: DEBUG nova.compute.manager [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 811.909671] env[68244]: DEBUG nova.compute.manager [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing instance network info cache due to event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 811.909671] env[68244]: DEBUG oslo_concurrency.lockutils [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.909882] env[68244]: DEBUG oslo_concurrency.lockutils [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.910079] env[68244]: DEBUG nova.network.neutron [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.949547] env[68244]: DEBUG oslo_concurrency.lockutils [req-b41fa099-919b-4314-8489-ca07aeac9fe8 req-605092e7-7558-44ad-890d-86fb061d9e16 service nova] Releasing lock "refresh_cache-f5724973-2349-481c-b2ba-d1287f09c1db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.145265] env[68244]: DEBUG nova.network.neutron [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.200205] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ffa17045-fadf-47d7-9c3b-19d0d54de3fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.647730] env[68244]: INFO nova.compute.manager [-] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Took 1.75 seconds to deallocate network for instance. 
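Several records above dump the cached network_info for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 as a list of VIF dictionaries. As a reading aid only, the sketch below walks one such element (trimmed to the fields it actually touches, with values copied from the log entry) and pulls out the port, MAC, fixed IPs, and MTU; summarize_vif() is an invented helper, not part of Nova.

```python
# Sketch: extract the useful bits from one cached network_info element of the
# shape logged above. Values are copied from the log; summarize_vif() is a
# hypothetical helper for illustration only.
vif = {
    "id": "1266d6b5-36fc-49f9-ab98-42add17e5a24",
    "address": "fa:16:3e:b7:e2:f5",
    "type": "ovs",
    "devname": "tap1266d6b5-36",
    "active": True,
    "network": {
        "id": "ffd97aa6-542e-48ed-abdf-da687721994c",
        "bridge": "br-int",
        "meta": {"mtu": 8950},
        "subnets": [
            {"cidr": "192.168.128.0/28",
             "ips": [{"address": "192.168.128.3", "type": "fixed"}]},
        ],
    },
}


def summarize_vif(vif):
    fixed = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    return (
        f"port {vif['id']} ({vif['type']}, {vif['devname']}) "
        f"mac {vif['address']} fixed {', '.join(fixed)} "
        f"mtu {vif['network']['meta']['mtu']} active={vif['active']}"
    )


print(summarize_vif(vif))
```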
[ 812.705777] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance df4674a2-87de-4507-950a-5941fae93aab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.717130] env[68244]: DEBUG nova.network.neutron [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updated VIF entry in instance network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 812.717566] env[68244]: DEBUG nova.network.neutron [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.153898] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.208811] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.220844] env[68244]: DEBUG oslo_concurrency.lockutils [req-7bcf8e58-4951-459b-a72a-65db08127d91 req-664c0822-c3a6-48c4-87b2-9332b620d574 service nova] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.712461] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cedcff81-0010-4fa6-95bf-72a4dcac5427 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.939740] env[68244]: DEBUG nova.compute.manager [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Received event network-vif-deleted-ea8b9298-593f-4aec-a795-9d1e6163a01f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 813.940011] env[68244]: DEBUG nova.compute.manager [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 813.940243] env[68244]: DEBUG nova.compute.manager [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing instance network info cache due to event network-changed-1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 813.940496] env[68244]: DEBUG oslo_concurrency.lockutils [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] Acquiring lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.940668] env[68244]: DEBUG oslo_concurrency.lockutils [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] Acquired lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.940875] env[68244]: DEBUG nova.network.neutron [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Refreshing network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.215752] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2487689d-7a83-49d7-be78-fbb946ebef8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.718881] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d74a0d56-8656-429c-a703-fca87e07798f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.719272] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 814.719456] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 814.735750] env[68244]: DEBUG nova.network.neutron [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updated VIF entry in instance network info cache for port 1266d6b5-36fc-49f9-ab98-42add17e5a24. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 814.736160] env[68244]: DEBUG nova.network.neutron [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [{"id": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "address": "fa:16:3e:b7:e2:f5", "network": {"id": "ffd97aa6-542e-48ed-abdf-da687721994c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1732363127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4138f768f30b41d1983fc67959dec2e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c405e9f-a6c8-4308-acac-071654efe18e", "external-id": "nsx-vlan-transportzone-851", "segmentation_id": 851, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1266d6b5-36", "ovs_interfaceid": "1266d6b5-36fc-49f9-ab98-42add17e5a24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.225196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc38c08-6ee8-4800-99d8-14137c0c6714 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.233967] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d45a10-d2ad-4f61-994c-c77678beb5f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.238792] env[68244]: DEBUG oslo_concurrency.lockutils [req-347eb1df-369e-48a9-98dc-d4eede6b1f2b req-e218efd7-021a-4c5d-9fec-ad42e01218f4 service nova] Releasing lock "refresh_cache-aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.264675] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe95b5d-7404-4e77-9e15-663ce3e40cf6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.273151] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a59709-a80b-4594-a43e-9c00f72d6a00 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.287041] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.791048] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.296881] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 816.297303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.204s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.297607] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.919s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.299156] env[68244]: INFO nova.compute.claims [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.758150] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8c35cdbb-f329-4893-b254-277005f276ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.766960] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426209c-8e59-4e63-8f53-c230e76799dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.797859] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48560ac0-5b88-4721-b2f8-27f52b4fb3c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.805679] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8b07da-b54c-4a18-89ed-6ea6026d0bf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.819171] env[68244]: DEBUG nova.compute.provider_tree [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.322291] env[68244]: DEBUG nova.scheduler.client.report [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.827340] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.827923] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.830570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.202s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.833633] env[68244]: INFO nova.compute.claims [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.338648] env[68244]: DEBUG nova.compute.utils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.342455] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.345105] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.382780] env[68244]: DEBUG nova.policy [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6af77f00c84d4e99bea878bc30dcc361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '821b99c053aa45b4b6b8fb09eb63aa73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.709366] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Successfully created port: 23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.843805] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.415417] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475d1f08-af14-49f3-8636-8522117c3ceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.423765] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83b4f46-90dc-45d2-8848-c95d0dad69de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.453254] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceb4b68-774a-4c6e-8619-b3aa4acbf1bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.460265] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9033b15-25a1-49e7-85dc-b75b59fa3b18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.474017] env[68244]: DEBUG nova.compute.provider_tree [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.857212] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.885507] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.885653] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.885791] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.886047] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.886237] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.886415] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.886652] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.886843] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.887051] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Got 1 possible 
topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.887290] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.887480] env[68244]: DEBUG nova.virt.hardware [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.888390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85da7c6c-2d38-4c8b-ac22-d21c8aaa1c56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.896434] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34497e58-ed32-47fd-89b8-c165be10abee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.977240] env[68244]: DEBUG nova.scheduler.client.report [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.273381] env[68244]: DEBUG nova.compute.manager [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Received event network-vif-plugged-23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 821.273746] env[68244]: DEBUG oslo_concurrency.lockutils [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] Acquiring lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.273850] env[68244]: DEBUG oslo_concurrency.lockutils [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.274113] env[68244]: DEBUG oslo_concurrency.lockutils [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.274371] env[68244]: DEBUG nova.compute.manager [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] No waiting events found dispatching network-vif-plugged-23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.274518] env[68244]: WARNING nova.compute.manager [req-d36c79c9-445c-441f-b38f-b755cecb5750 req-d157aaad-9977-4fe6-8482-c10bacec8cc5 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Received unexpected event network-vif-plugged-23889c4c-9b8b-4405-b957-90fda02d4ef5 for instance with vm_state building and task_state spawning. [ 821.434735] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Successfully updated port: 23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.482791] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.483216] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 821.486114] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.931s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.487941] env[68244]: INFO nova.compute.claims [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.938711] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.938958] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.939046] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.993384] env[68244]: DEBUG nova.compute.utils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.997939] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.998266] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 822.178936] env[68244]: DEBUG nova.policy [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '007f620cfbc44d128d43fb0ccaa13517', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ade72f4f2a384ddb997b5d0f33afc5c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 822.472284] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.498546] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 822.627893] env[68244]: DEBUG nova.network.neutron [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Updating instance_info_cache with network_info: [{"id": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "address": "fa:16:3e:52:19:a7", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23889c4c-9b", "ovs_interfaceid": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.634770] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Successfully created port: e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.003766] env[68244]: INFO nova.virt.block_device [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Booting with volume fbca3648-be41-4048-bbb7-c27ab5f4f106 at /dev/sda [ 823.031978] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee02f4b-5b3b-40e0-bb45-602df4fc8515 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.041369] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312f23ad-4e1b-40d7-8a54-ec23ab7416f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.045265] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97088cc7-f113-412b-b901-1b2a650a262e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.077658] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6aee405-7c79-4d08-8893-dfce32397f40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.088527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f2a9ea05-0c32-4300-b12d-7b20c4d2c16b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.095767] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73979f36-4509-44ff-a60b-97e4da560eb3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.108689] env[68244]: DEBUG nova.compute.provider_tree [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.120906] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38b8458a-41b5-438f-8212-75df2ad3b39c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.128632] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fd0074-5847-4e7e-9472-7ae7bd91b6d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.140310] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.140520] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance network_info: |[{"id": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "address": "fa:16:3e:52:19:a7", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23889c4c-9b", "ovs_interfaceid": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.140894] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:19:a7', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23889c4c-9b8b-4405-b957-90fda02d4ef5', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.148261] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.149102] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.149350] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8ebb7c1-1b05-4cf6-b3e4-87f1a75bc5d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.175882] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e027992f-deb8-49e3-9843-edf53cec6988 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.180742] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.180742] env[68244]: value = "task-2780311" [ 823.180742] env[68244]: _type = "Task" [ 823.180742] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.185713] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aa5a18-f253-42be-a1b3-b4e4a408978f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.192529] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780311, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.200724] env[68244]: DEBUG nova.virt.block_device [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating existing volume attachment record: 75a1e31f-2f6b-4776-9e10-07ce94cb5d8f {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 823.312822] env[68244]: DEBUG nova.compute.manager [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Received event network-changed-23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 823.314018] env[68244]: DEBUG nova.compute.manager [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Refreshing instance network info cache due to event network-changed-23889c4c-9b8b-4405-b957-90fda02d4ef5. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 823.314018] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] Acquiring lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.314018] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] Acquired lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.314379] env[68244]: DEBUG nova.network.neutron [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Refreshing network info cache for port 23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.622560] env[68244]: DEBUG nova.scheduler.client.report [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.692356] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780311, 'name': CreateVM_Task, 'duration_secs': 0.328191} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.692529] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.693230] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.693394] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.693708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.693960] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04b3c93d-ea07-4761-aed3-59a0135e8f8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.698856] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 823.698856] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9b8dd-e0ec-67c7-9e45-a2c39b5256ef" [ 823.698856] env[68244]: _type = "Task" [ 823.698856] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.706790] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9b8dd-e0ec-67c7-9e45-a2c39b5256ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.021078] env[68244]: DEBUG nova.network.neutron [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Updated VIF entry in instance network info cache for port 23889c4c-9b8b-4405-b957-90fda02d4ef5. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.021477] env[68244]: DEBUG nova.network.neutron [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Updating instance_info_cache with network_info: [{"id": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "address": "fa:16:3e:52:19:a7", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23889c4c-9b", "ovs_interfaceid": "23889c4c-9b8b-4405-b957-90fda02d4ef5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.127284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.128063] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 824.132165] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.544s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.137658] env[68244]: INFO nova.compute.claims [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.212034] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9b8dd-e0ec-67c7-9e45-a2c39b5256ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011612} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.212034] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.212034] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.212034] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.212034] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.212034] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.212034] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f86d0b3d-686b-4e34-9fdc-ccb626fc7f0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.218698] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Successfully updated port: e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 824.225771] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.226117] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.227427] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65f0538d-960d-4f96-8292-a8ab98a8d863 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.233464] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 824.233464] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb55e6-894f-5a57-49a0-93d38e8c4b50" [ 824.233464] env[68244]: _type = "Task" [ 824.233464] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.242512] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb55e6-894f-5a57-49a0-93d38e8c4b50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.523774] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ff3e4f6-b3c6-4852-9e62-91a682c42761 req-358aa334-5f41-45a7-a427-a92bd9827f68 service nova] Releasing lock "refresh_cache-100ec1f9-6776-4832-a4c2-e9a4def0d350" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.633991] env[68244]: DEBUG nova.compute.utils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 824.635525] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.635611] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.709910] env[68244]: DEBUG nova.policy [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '675f446d4ee84a3f83ba0e211cbb9f16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb00e6745be3453c87b69a7638c20bb8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.723512] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.724847] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquired lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.724847] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.747024] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb55e6-894f-5a57-49a0-93d38e8c4b50, 'name': SearchDatastore_Task, 'duration_secs': 0.008947} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.747024] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffb536f3-372c-431e-8458-0838d83ceb43 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.752640] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 824.752640] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3ad1e-68b3-8787-401e-c8b0680a9bac" [ 824.752640] env[68244]: _type = "Task" [ 824.752640] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.761239] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3ad1e-68b3-8787-401e-c8b0680a9bac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.139325] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 825.148692] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Successfully created port: e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.256847] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.265008] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3ad1e-68b3-8787-401e-c8b0680a9bac, 'name': SearchDatastore_Task, 'duration_secs': 0.009042} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.267397] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.267664] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 100ec1f9-6776-4832-a4c2-e9a4def0d350/100ec1f9-6776-4832-a4c2-e9a4def0d350.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.268096] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07c071f6-994d-4431-a4b0-aa56be14ee43 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.275773] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 825.275773] env[68244]: value = "task-2780312" [ 825.275773] env[68244]: _type = "Task" [ 825.275773] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.286103] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780312, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.291500] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.292663] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.293132] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.293132] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.293213] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] 
Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.296937] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.296937] env[68244]: DEBUG nova.virt.hardware [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.296937] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7fe98b-abf6-40be-80b0-e3e1df8a3680 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.307312] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abf50c0-70da-4863-bb18-d0ddfdcc0b56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.397393] env[68244]: DEBUG nova.compute.manager [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Received event network-vif-plugged-e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 825.397605] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Acquiring lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.397803] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.397968] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.398218] env[68244]: DEBUG nova.compute.manager [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] No waiting events found dispatching network-vif-plugged-e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.398425] env[68244]: WARNING nova.compute.manager [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Received unexpected event 
network-vif-plugged-e14972f2-13d5-417d-9c9b-9a0f731e4e44 for instance with vm_state building and task_state spawning. [ 825.398554] env[68244]: DEBUG nova.compute.manager [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Received event network-changed-e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 825.398676] env[68244]: DEBUG nova.compute.manager [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Refreshing instance network info cache due to event network-changed-e14972f2-13d5-417d-9c9b-9a0f731e4e44. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 825.398835] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Acquiring lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.420328] env[68244]: DEBUG nova.network.neutron [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating instance_info_cache with network_info: [{"id": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "address": "fa:16:3e:c3:fd:a7", "network": {"id": "0e6fc214-45cf-4ada-83ee-0f7b293ea9f8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-951457791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ade72f4f2a384ddb997b5d0f33afc5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14972f2-13", "ovs_interfaceid": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.764136] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc71a04-667c-40b1-9cf7-0f6179970be1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.771657] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb0ca11-ba52-4667-96f5-88c68320953e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.805235] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab0f2cc-e93d-4fc4-a654-587d2f61579b {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.810869] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471642} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.811458] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 100ec1f9-6776-4832-a4c2-e9a4def0d350/100ec1f9-6776-4832-a4c2-e9a4def0d350.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 825.811674] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 825.811914] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-395c427f-a1f7-40a7-a2a7-d49b068efe26 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.817353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1cd575-c7c8-4dae-ad47-10613045bc57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.822132] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 825.822132] env[68244]: value = "task-2780313" [ 825.822132] env[68244]: _type = "Task" [ 825.822132] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.832789] env[68244]: DEBUG nova.compute.provider_tree [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.838735] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.924061] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Releasing lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.924404] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Instance network_info: |[{"id": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "address": "fa:16:3e:c3:fd:a7", "network": {"id": "0e6fc214-45cf-4ada-83ee-0f7b293ea9f8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-951457791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ade72f4f2a384ddb997b5d0f33afc5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14972f2-13", "ovs_interfaceid": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.924697] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Acquired lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.924875] env[68244]: DEBUG nova.network.neutron [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Refreshing network info cache for port e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.926147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:fd:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e14972f2-13d5-417d-9c9b-9a0f731e4e44', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.933767] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 
tempest-ServersTestBootFromVolume-1236961585-project-member] Creating folder: Project (ade72f4f2a384ddb997b5d0f33afc5c3). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.934288] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06076d22-7536-499f-a880-3f87c628ffd6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.949282] env[68244]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 825.949440] env[68244]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68244) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 825.949745] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Folder already exists: Project (ade72f4f2a384ddb997b5d0f33afc5c3). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 825.949928] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Creating folder: Instances. Parent ref: group-v558935. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.950162] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e34f1843-3e86-4d02-aff6-b77a746dd7e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.958885] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Created folder: Instances in parent group-v558935. [ 825.959139] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.959910] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.960167] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a55f7094-647b-48cc-9ada-688183bd8260 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.979144] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.979144] env[68244]: value = "task-2780316" [ 825.979144] env[68244]: _type = "Task" [ 825.979144] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.987392] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780316, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.151284] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 826.177309] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 826.177608] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.177844] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 826.178276] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.178500] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 826.178707] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 826.179074] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 826.179272] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 826.179495] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 826.179693] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 826.179940] env[68244]: DEBUG nova.virt.hardware [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 826.181013] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7327f8-4ce3-4588-a118-e0dee99a5623 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.192019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a075fa9a-eb55-498c-a518-3d9728533eb8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.331770] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251709} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.332567] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.333345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119a2295-0ecc-435c-bb23-d38bcad64385 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.336559] env[68244]: DEBUG nova.scheduler.client.report [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.359039] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 100ec1f9-6776-4832-a4c2-e9a4def0d350/100ec1f9-6776-4832-a4c2-e9a4def0d350.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.359944] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15a562a0-5981-479f-82f7-5beb42aed386 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.379922] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 826.379922] env[68244]: value = "task-2780317" [ 826.379922] env[68244]: _type = "Task" [ 826.379922] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.389951] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780317, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.490016] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780316, 'name': CreateVM_Task, 'duration_secs': 0.345162} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.490347] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.491032] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'boot_index': 0, 'guest_format': None, 'attachment_id': '75a1e31f-2f6b-4776-9e10-07ce94cb5d8f', 'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558946', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'name': 'volume-fbca3648-be41-4048-bbb7-c27ab5f4f106', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '828865d7-d06a-4683-9149-987e6d9efbd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'serial': 'fbca3648-be41-4048-bbb7-c27ab5f4f106'}, 'volume_type': None}], 'swap': None} {{(pid=68244) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 826.491391] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Root volume attach. Driver type: vmdk {{(pid=68244) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 826.492177] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71d331e-ac05-4197-b950-3b45420ebb88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.500085] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639e0f75-959a-478c-8c7e-eb44ab72d454 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.506078] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865058e3-7b77-4ebc-9508-4911ed7dc716 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.514518] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1275a6b2-fcbf-43d2-9b92-e443c3811652 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.521456] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 826.521456] env[68244]: value = "task-2780318" [ 826.521456] env[68244]: _type = "Task" [ 826.521456] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.536846] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780318, 'name': RelocateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.670036] env[68244]: DEBUG nova.network.neutron [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updated VIF entry in instance network info cache for port e14972f2-13d5-417d-9c9b-9a0f731e4e44. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.670510] env[68244]: DEBUG nova.network.neutron [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating instance_info_cache with network_info: [{"id": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "address": "fa:16:3e:c3:fd:a7", "network": {"id": "0e6fc214-45cf-4ada-83ee-0f7b293ea9f8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-951457791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ade72f4f2a384ddb997b5d0f33afc5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14972f2-13", "ovs_interfaceid": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.842109] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.842513] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.845408] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.015s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.846772] env[68244]: INFO nova.compute.claims [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.891197] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780317, 'name': ReconfigVM_Task, 'duration_secs': 0.285364} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.891471] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 100ec1f9-6776-4832-a4c2-e9a4def0d350/100ec1f9-6776-4832-a4c2-e9a4def0d350.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 826.892293] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bff6b54-d59d-44dc-820e-759b3eeb3bd5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.898639] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 826.898639] env[68244]: value = "task-2780319" [ 826.898639] env[68244]: _type = "Task" [ 826.898639] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.906915] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780319, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.927401] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Successfully updated port: e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.032248] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780318, 'name': RelocateVM_Task, 'duration_secs': 0.02716} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.032588] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 827.032852] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558946', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'name': 'volume-fbca3648-be41-4048-bbb7-c27ab5f4f106', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '828865d7-d06a-4683-9149-987e6d9efbd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'serial': 'fbca3648-be41-4048-bbb7-c27ab5f4f106'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 827.033713] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac692d4-575b-4d2b-8f5e-fc570a912d73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.054261] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155dd1f4-fe09-47cb-820a-1678e9ac8530 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.087026] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] volume-fbca3648-be41-4048-bbb7-c27ab5f4f106/volume-fbca3648-be41-4048-bbb7-c27ab5f4f106.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.087026] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12c3c374-7819-408b-a9a8-67cc9bfe26b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.106955] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 827.106955] env[68244]: value = "task-2780320" [ 827.106955] env[68244]: _type = "Task" [ 827.106955] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.115013] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780320, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.173735] env[68244]: DEBUG oslo_concurrency.lockutils [req-ca54c419-7ade-41b9-8402-86f26f8cf462 req-a99fa636-f996-4ad3-a7e8-e6de39a12dd4 service nova] Releasing lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.351056] env[68244]: DEBUG nova.compute.utils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 827.354145] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.354346] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.398517] env[68244]: DEBUG nova.policy [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'befd232894ad4fdcbda5dd7aba055aef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd31896823df441cb451756c990f51f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.408497] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780319, 'name': Rename_Task, 'duration_secs': 0.136616} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.408787] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.409038] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e7a4efc-2be0-42ea-bc3f-4def0dc314c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.415351] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 827.415351] env[68244]: value = "task-2780321" [ 827.415351] env[68244]: _type = "Task" [ 827.415351] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.423082] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.424970] env[68244]: DEBUG nova.compute.manager [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Received event network-vif-plugged-e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 827.425287] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Acquiring lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.425565] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.425744] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.425914] env[68244]: DEBUG nova.compute.manager [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] No waiting events found dispatching network-vif-plugged-e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 827.426095] env[68244]: WARNING nova.compute.manager [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 
req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Received unexpected event network-vif-plugged-e3259135-2e40-4236-8149-b4172ef87318 for instance with vm_state building and task_state spawning. [ 827.426267] env[68244]: DEBUG nova.compute.manager [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Received event network-changed-e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 827.426414] env[68244]: DEBUG nova.compute.manager [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Refreshing instance network info cache due to event network-changed-e3259135-2e40-4236-8149-b4172ef87318. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 827.426600] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Acquiring lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.426732] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Acquired lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.427394] env[68244]: DEBUG nova.network.neutron [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Refreshing network info cache for port e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.429781] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.620502] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780320, 'name': ReconfigVM_Task, 'duration_secs': 0.266553} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.620777] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Reconfigured VM instance instance-00000029 to attach disk [datastore2] volume-fbca3648-be41-4048-bbb7-c27ab5f4f106/volume-fbca3648-be41-4048-bbb7-c27ab5f4f106.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.629726] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e2fb200-7e89-49f0-9fe6-9504ed55ea8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.645564] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 827.645564] env[68244]: value = "task-2780322" [ 827.645564] env[68244]: _type = "Task" [ 827.645564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.653797] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780322, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.777033] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Successfully created port: c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.862022] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.924801] env[68244]: DEBUG oslo_vmware.api [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780321, 'name': PowerOnVM_Task, 'duration_secs': 0.50275} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.925143] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.926779] env[68244]: INFO nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Took 7.07 seconds to spawn the instance on the hypervisor. 
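The records up to this point (CreateVM_Task, RelocateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same oslo.vmware pattern: the driver invokes a vCenter task, then _poll_task logs "progress is N%." until the task finishes and the completed record reports duration_secs. The snippet below is only a minimal sketch of that polling loop under assumed names — `get_task_info` and `POLL_INTERVAL` are illustrative stand-ins, not the oslo.vmware API or the Nova driver code.

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative value only


class TaskFailed(Exception):
    """Raised when the backend reports the task ended in an error state."""


def wait_for_task(get_task_info, task_id, log=print):
    """Poll a long-running backend task until it finishes.

    ``get_task_info`` is a hypothetical callable returning a dict such as
    ``{'state': 'running', 'progress': 71}`` -- a stand-in for the
    property-collector round trips seen in the log, not oslo.vmware's API.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            duration = time.monotonic() - started
            log("Task: {'id': %s} completed successfully in %.6fs" % (task_id, duration))
            return info
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Mirrors the repeated "progress is N%." DEBUG lines in the log above.
        log("Task: {'id': %s} progress is %s%%." % (task_id, info.get('progress', 0)))
        time.sleep(POLL_INTERVAL)
```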
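The section also shows oslo.concurrency's lock bookkeeping for locks such as "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" and "compute_resources": an "Acquiring lock" line, an "acquired ... waited Xs" line, and a "released ... held Ys" line. The sketch below reproduces only that waited/held accounting with a plain threading.Lock; it is an assumption-level illustration of the pattern, not the oslo_concurrency.lockutils implementation.

```python
import contextlib
import threading
import time

_locks = {}                      # name -> threading.Lock, created on first use
_locks_guard = threading.Lock()  # protects the registry itself


@contextlib.contextmanager
def timed_lock(name, owner, log=print):
    """Acquire a named lock and report waited/held times, lockutils-style."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    log('Acquiring lock "%s" by "%s"' % (name, owner))
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    log('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        log('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))
```

A caller would wrap a critical section as `with timed_lock("compute_resources", "instance_claim"): ...`, producing waited/held lines analogous to the resource-tracker entries in this log.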
[ 827.926779] env[68244]: DEBUG nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.926779] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6271b38-cae0-415b-a15c-b89b9457cba7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.969080] env[68244]: DEBUG nova.network.neutron [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.061369] env[68244]: DEBUG nova.network.neutron [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.158786] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780322, 'name': ReconfigVM_Task, 'duration_secs': 0.131807} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.162152] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558946', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'name': 'volume-fbca3648-be41-4048-bbb7-c27ab5f4f106', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '828865d7-d06a-4683-9149-987e6d9efbd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'serial': 'fbca3648-be41-4048-bbb7-c27ab5f4f106'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 828.164278] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e81683b8-ea41-472f-b21b-55ba2568840f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.173482] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 828.173482] env[68244]: value = "task-2780323" [ 828.173482] env[68244]: _type = "Task" [ 828.173482] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.186250] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780323, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.450160] env[68244]: INFO nova.compute.manager [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Took 60.09 seconds to build instance. [ 828.453654] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccacb39-3038-4e05-bcef-569ee2bd0012 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.461662] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b749b7b4-532f-420e-af09-f5e92ea5aae2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.497919] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75d8e5c-f7f5-408e-8870-595448d74e36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.505107] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce46090-b5cb-4fd9-872c-b5fb0cbf8c6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.518482] env[68244]: DEBUG nova.compute.provider_tree [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.564020] env[68244]: DEBUG oslo_concurrency.lockutils [req-205ec358-1694-4985-9f57-f7cdcdf21ab3 req-72ff21ce-757a-439f-8d3c-2cf3f61761a9 service nova] Releasing lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.564390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquired lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.564583] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.684434] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780323, 'name': Rename_Task, 'duration_secs': 0.272332} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.685036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.685036] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca015103-5d86-42a5-8b54-0ed32e67ea9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.691490] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 828.691490] env[68244]: value = "task-2780324" [ 828.691490] env[68244]: _type = "Task" [ 828.691490] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.700843] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.868975] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.890585] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c03ca0a-35ce-48a7-9857-be43ad9f2fb4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.896537] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.896792] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.896932] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.897164] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.897398] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.897452] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.897679] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.897797] env[68244]: DEBUG nova.virt.hardware [None 
req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.897955] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.898154] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.898352] env[68244]: DEBUG nova.virt.hardware [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.899146] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc90812-5ac6-487b-8bbb-13d1ebe10e95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.905389] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Suspending the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 828.905389] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c05a0b95-e4cb-4148-ad2c-ae4f5364b412 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.908975] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de4ffa9-7791-4ab3-a588-ff96c42caa3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.917460] env[68244]: DEBUG oslo_vmware.api [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 828.917460] env[68244]: value = "task-2780325" [ 828.917460] env[68244]: _type = "Task" [ 828.917460] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.933183] env[68244]: DEBUG oslo_vmware.api [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780325, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.951891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-778e9ac9-1ae6-40d6-97a5-d0f3ee961a6c tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.607s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.021884] env[68244]: DEBUG nova.scheduler.client.report [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.107668] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.201597] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780324, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.270551] env[68244]: DEBUG nova.network.neutron [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Updating instance_info_cache with network_info: [{"id": "e3259135-2e40-4236-8149-b4172ef87318", "address": "fa:16:3e:66:1c:e0", "network": {"id": "ea57302f-c973-4dcd-adc8-15a02be98ceb", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1911517092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb00e6745be3453c87b69a7638c20bb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3259135-2e", "ovs_interfaceid": "e3259135-2e40-4236-8149-b4172ef87318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.425193] env[68244]: DEBUG oslo_vmware.api [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780325, 'name': SuspendVM_Task} progress is 58%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.454561] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 829.460871] env[68244]: DEBUG nova.compute.manager [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Received event network-vif-plugged-c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 829.461216] env[68244]: DEBUG oslo_concurrency.lockutils [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] Acquiring lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.461707] env[68244]: DEBUG oslo_concurrency.lockutils [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.461985] env[68244]: DEBUG oslo_concurrency.lockutils [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.462307] env[68244]: DEBUG nova.compute.manager [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] No waiting events found dispatching network-vif-plugged-c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.462609] env[68244]: WARNING nova.compute.manager [req-3e54afc2-e3f5-40f9-a6ad-9eb062200d89 req-298e63be-e939-43d6-a738-ea0592f90890 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Received unexpected event network-vif-plugged-c231c79b-11e8-4987-8977-587e745b5cbe for instance with vm_state building and task_state spawning. [ 829.527849] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.528388] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.531965] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.352s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.532304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.534536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.267s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.534791] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.536624] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.583s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.538933] env[68244]: INFO nova.compute.claims [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.575986] env[68244]: INFO nova.scheduler.client.report [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Deleted allocations for instance d1fb6fff-b1b7-4c1b-8995-41628cadf7d5 [ 829.581686] env[68244]: INFO nova.scheduler.client.report [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted allocations for instance 6abb889a-2e96-4aba-8e36-c4c8997dd4e2 [ 829.673424] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Successfully updated port: c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 829.701443] env[68244]: DEBUG oslo_vmware.api [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780324, 'name': PowerOnVM_Task, 'duration_secs': 0.829689} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.701699] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.701886] env[68244]: INFO nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Took 4.41 seconds to spawn the instance on the hypervisor. [ 829.702073] env[68244]: DEBUG nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.703040] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8489d69-6292-459a-a8ab-e41ce349325a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.772842] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Releasing lock "refresh_cache-340aa1e7-dc0a-4cba-8979-0c591830e9db" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.774054] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance network_info: |[{"id": "e3259135-2e40-4236-8149-b4172ef87318", "address": "fa:16:3e:66:1c:e0", "network": {"id": "ea57302f-c973-4dcd-adc8-15a02be98ceb", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1911517092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb00e6745be3453c87b69a7638c20bb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c3e2368-4a35-4aa5-9135-23daedbbf9ef", "external-id": "nsx-vlan-transportzone-125", "segmentation_id": 125, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3259135-2e", "ovs_interfaceid": "e3259135-2e40-4236-8149-b4172ef87318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.774054] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:1c:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c3e2368-4a35-4aa5-9135-23daedbbf9ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3259135-2e40-4236-8149-b4172ef87318', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.781613] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Creating folder: Project (eb00e6745be3453c87b69a7638c20bb8). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.782141] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c1d9441-3ba5-472f-8c7f-6bd570d66975 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.792846] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Created folder: Project (eb00e6745be3453c87b69a7638c20bb8) in parent group-v558876. [ 829.793055] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Creating folder: Instances. Parent ref: group-v558999. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.793327] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35625eae-cf9d-46af-8395-33d4e111e06c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.802827] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Created folder: Instances in parent group-v558999. [ 829.803158] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.803268] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.803469] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-602ff9de-5ef2-4c6d-88f4-691ea94b2740 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.822275] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.822275] env[68244]: value = "task-2780328" [ 829.822275] env[68244]: _type = "Task" [ 829.822275] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.831026] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780328, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.924776] env[68244]: DEBUG oslo_vmware.api [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780325, 'name': SuspendVM_Task, 'duration_secs': 0.699421} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.925326] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Suspended the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 829.925709] env[68244]: DEBUG nova.compute.manager [None req-b3f8a5f0-1838-4265-aae8-a0946abc4de8 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.926961] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1731ac-d032-4077-9ed9-c5bc22cd10ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.974030] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.033901] env[68244]: DEBUG nova.compute.utils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.038187] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 830.038368] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.078175] env[68244]: DEBUG nova.policy [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'befd232894ad4fdcbda5dd7aba055aef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd31896823df441cb451756c990f51f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.096145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b5665649-8a2a-4355-adca-c352be19cd06 tempest-ServerMetadataNegativeTestJSON-875894540 tempest-ServerMetadataNegativeTestJSON-875894540-project-member] Lock "d1fb6fff-b1b7-4c1b-8995-41628cadf7d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.282s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.111147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-06e3e853-c1fe-4ecf-a918-80d82cdb2c33 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "6abb889a-2e96-4aba-8e36-c4c8997dd4e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.021s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.176729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.176888] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.177051] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.221243] env[68244]: INFO nova.compute.manager [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] 
Took 57.61 seconds to build instance. [ 830.332633] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780328, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.541534] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.723355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5f38d2f-6872-41a7-8cc5-09753439e8fa tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.792s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.819931] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Successfully created port: 87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.834207] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780328, 'name': CreateVM_Task, 'duration_secs': 0.574297} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.835447] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.836345] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.837264] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.838030] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.841607] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-984d4dac-8856-459f-a74f-a7cb0edbf284 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.846960] env[68244]: DEBUG oslo_vmware.api [None 
req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 830.846960] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da1b63-775c-ab64-6fcd-f97206d38c0e" [ 830.846960] env[68244]: _type = "Task" [ 830.846960] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.857593] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da1b63-775c-ab64-6fcd-f97206d38c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.870961] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.099968] env[68244]: DEBUG nova.network.neutron [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updating instance_info_cache with network_info: [{"id": "c231c79b-11e8-4987-8977-587e745b5cbe", "address": "fa:16:3e:33:f0:a3", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc231c79b-11", "ovs_interfaceid": "c231c79b-11e8-4987-8977-587e745b5cbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.143533] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad64b8c-f703-42ed-94b9-be64d5de2d74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.152352] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98892704-72f0-4cae-82bd-aba811503539 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.193121] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7d6ba479-ccc1-4096-bf45-7be8aaab0358 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.201425] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f14dae-f3e9-43fb-b82d-ff37d744ff1a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.225026] env[68244]: DEBUG nova.compute.provider_tree [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.228745] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 831.357671] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da1b63-775c-ab64-6fcd-f97206d38c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.038082} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.358066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.358439] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.359130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.359356] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.359702] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.360039] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2edf8e1d-db94-4745-a413-3dc631bbd64e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.369327] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.370061] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.370874] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d238ddb-24e8-4942-a81f-dcb3b25e9a6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.377554] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 831.377554] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f4c596-e5e7-8208-def5-041ce482f480" [ 831.377554] env[68244]: _type = "Task" [ 831.377554] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.385866] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f4c596-e5e7-8208-def5-041ce482f480, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.552395] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.593739] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.594018] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.594178] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.594364] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.594509] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.594652] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.594896] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.595017] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
831.596093] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.597274] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.597274] env[68244]: DEBUG nova.virt.hardware [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.597724] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385f0252-0efd-49c1-9318-d43a30a8f5e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.602903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.603595] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Instance network_info: |[{"id": "c231c79b-11e8-4987-8977-587e745b5cbe", "address": "fa:16:3e:33:f0:a3", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc231c79b-11", "ovs_interfaceid": "c231c79b-11e8-4987-8977-587e745b5cbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.603729] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:33:f0:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c708997-9b6e-4c27-8a58-02c0d1359d5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c231c79b-11e8-4987-8977-587e745b5cbe', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.611634] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Creating folder: Project (fd31896823df441cb451756c990f51f8). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.612313] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36051299-0575-489d-a0b0-cc052cb6a05b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.618530] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8877052b-4a7b-4d1e-b9c6-10e4daeed5cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.626047] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Created folder: Project (fd31896823df441cb451756c990f51f8) in parent group-v558876. [ 831.626479] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Creating folder: Instances. Parent ref: group-v559002. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.634655] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89ab6afa-624b-47fe-9611-ed9f2fd6d660 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.645708] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Created folder: Instances in parent group-v559002. [ 831.645708] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.645864] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.645977] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b064bf2d-b7c7-4271-97a4-b1b7ab24501c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.664616] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.664616] env[68244]: value = "task-2780331" [ 831.664616] env[68244]: _type = "Task" [ 831.664616] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.675417] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780331, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.727749] env[68244]: DEBUG nova.scheduler.client.report [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.759183] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.828423] env[68244]: DEBUG nova.compute.manager [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Received event network-changed-c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 831.828635] env[68244]: DEBUG nova.compute.manager [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Refreshing instance network info cache due to event network-changed-c231c79b-11e8-4987-8977-587e745b5cbe. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 831.828854] env[68244]: DEBUG oslo_concurrency.lockutils [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] Acquiring lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.829008] env[68244]: DEBUG oslo_concurrency.lockutils [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] Acquired lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.829595] env[68244]: DEBUG nova.network.neutron [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Refreshing network info cache for port c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.890874] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f4c596-e5e7-8208-def5-041ce482f480, 'name': SearchDatastore_Task, 'duration_secs': 0.00907} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.892324] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f90a510-a212-4a4c-a018-35eb3cd6ebe3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.899541] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 831.899541] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528bec6b-f6ee-5494-f3fe-e0ee15388620" [ 831.899541] env[68244]: _type = "Task" [ 831.899541] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.908145] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528bec6b-f6ee-5494-f3fe-e0ee15388620, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.177913] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780331, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.232680] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.233101] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 832.237045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.582s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.238656] env[68244]: INFO nova.compute.claims [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.410538] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528bec6b-f6ee-5494-f3fe-e0ee15388620, 'name': SearchDatastore_Task, 'duration_secs': 0.019658} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.410814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.411083] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 340aa1e7-dc0a-4cba-8979-0c591830e9db/340aa1e7-dc0a-4cba-8979-0c591830e9db.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.411343] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-250ec141-456b-4336-a428-d0c6a6120f1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.418180] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 832.418180] env[68244]: value = "task-2780332" [ 832.418180] env[68244]: _type = "Task" [ 832.418180] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.429356] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.611697] env[68244]: DEBUG nova.network.neutron [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updated VIF entry in instance network info cache for port c231c79b-11e8-4987-8977-587e745b5cbe. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.612079] env[68244]: DEBUG nova.network.neutron [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updating instance_info_cache with network_info: [{"id": "c231c79b-11e8-4987-8977-587e745b5cbe", "address": "fa:16:3e:33:f0:a3", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc231c79b-11", "ovs_interfaceid": "c231c79b-11e8-4987-8977-587e745b5cbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.683035] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780331, 'name': CreateVM_Task, 'duration_secs': 0.667246} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.683135] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.683836] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.684013] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.684349] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.684894] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdddcf8a-b936-49f8-bbfa-776433e1c2bd {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.691408] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 832.691408] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5295ac64-36de-0971-48b1-8d199e1525a6" [ 832.691408] env[68244]: _type = "Task" [ 832.691408] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.711708] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5295ac64-36de-0971-48b1-8d199e1525a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.738701] env[68244]: DEBUG nova.compute.utils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.740402] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.740529] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.784818] env[68244]: DEBUG nova.policy [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'befd232894ad4fdcbda5dd7aba055aef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd31896823df441cb451756c990f51f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.929055] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780332, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.947100] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Successfully updated port: 87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.954732] env[68244]: DEBUG nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.955648] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258843f0-f1f8-46ac-82db-c908f4793690 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.083885] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Successfully created port: 7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.115094] env[68244]: DEBUG oslo_concurrency.lockutils [req-6458e1ad-a105-4568-beda-f2bcb5e47049 req-e3b00350-dfd7-45bb-b000-054a220e59c3 service nova] Releasing lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.201857] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5295ac64-36de-0971-48b1-8d199e1525a6, 'name': SearchDatastore_Task, 'duration_secs': 0.051234} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.202206] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.202445] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.202680] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.202864] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.202998] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.203276] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38a57fc2-d147-410a-9d91-182ada3945ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.213046] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.213046] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.213046] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba9aa9c-d289-42b1-befa-35f1436b277b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.219692] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 833.219692] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a3e0d-7379-b7f7-d2c7-9dda9b95fb74" [ 833.219692] env[68244]: _type = "Task" [ 833.219692] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.228298] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a3e0d-7379-b7f7-d2c7-9dda9b95fb74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.244090] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 833.430615] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511088} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.430867] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 340aa1e7-dc0a-4cba-8979-0c591830e9db/340aa1e7-dc0a-4cba-8979-0c591830e9db.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.431092] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.431339] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6158d9b2-7bad-4137-a865-9a9747138d19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.438414] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 833.438414] env[68244]: value = "task-2780333" [ 833.438414] env[68244]: _type = "Task" [ 833.438414] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.448282] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780333, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.451064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.451064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.451246] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.467440] env[68244]: INFO nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] instance snapshotting [ 833.467796] env[68244]: WARNING nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 833.476603] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711a6c62-de4a-45c7-ba29-d0e5d84f983b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.498706] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22502449-1824-4be6-b62d-2f44c0a16690 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.729888] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a3e0d-7379-b7f7-d2c7-9dda9b95fb74, 'name': SearchDatastore_Task, 'duration_secs': 0.009328} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.732987] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fe9afbf-1a30-4711-a25b-318a348271eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.738324] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 833.738324] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e90505-2237-7a8c-762e-1e82c0f1792d" [ 833.738324] env[68244]: _type = "Task" [ 833.738324] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.745986] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e90505-2237-7a8c-762e-1e82c0f1792d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.803387] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb92eed9-a7db-4b34-ad21-387ca634c6d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.811605] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4278a9-9ae3-43a5-99b0-8a599a0d831f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.841937] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecec3f23-619f-428d-948d-67410fae041e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.850749] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b62ba19-a756-4136-a27c-6b88d807a991 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.858606] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Received event network-changed-e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 833.858797] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Refreshing instance network info cache due to event network-changed-e14972f2-13d5-417d-9c9b-9a0f731e4e44. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 833.859009] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Acquiring lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.859158] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Acquired lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.859313] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Refreshing network info cache for port e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.868095] env[68244]: DEBUG nova.compute.provider_tree [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.948484] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071421} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.951069] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.951862] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc430e5c-621c-4977-b76f-ce3929f48ede {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.966834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "086dda59-4bd2-4ca2-a758-c120f1271f42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.966834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.966987] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.967218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.967417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.977535] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 340aa1e7-dc0a-4cba-8979-0c591830e9db/340aa1e7-dc0a-4cba-8979-0c591830e9db.vmdk or device None with type sparse {{(pid=68244) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.977890] env[68244]: INFO nova.compute.manager [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Terminating instance [ 833.979531] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e82ed922-ee9c-405f-b977-63cb2c9f588b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.000660] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 834.000660] env[68244]: value = "task-2780334" [ 834.000660] env[68244]: _type = "Task" [ 834.000660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.011236] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 834.011515] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780334, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.013767] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-87d15fe3-3b5c-4fdc-855b-3bb2db3d9373 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.019529] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.022723] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 834.022723] env[68244]: value = "task-2780335" [ 834.022723] env[68244]: _type = "Task" [ 834.022723] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.031065] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780335, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.080908] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.081211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.081456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.081641] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.081823] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.084544] env[68244]: INFO nova.compute.manager [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Terminating instance [ 834.233949] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updated VIF entry in instance network info cache for port e14972f2-13d5-417d-9c9b-9a0f731e4e44. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.234428] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating instance_info_cache with network_info: [{"id": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "address": "fa:16:3e:c3:fd:a7", "network": {"id": "0e6fc214-45cf-4ada-83ee-0f7b293ea9f8", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-951457791-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ade72f4f2a384ddb997b5d0f33afc5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14972f2-13", "ovs_interfaceid": "e14972f2-13d5-417d-9c9b-9a0f731e4e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.249068] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e90505-2237-7a8c-762e-1e82c0f1792d, 'name': SearchDatastore_Task, 'duration_secs': 0.010134} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.249400] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.249665] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b0b79f25-f97d-4d59-ae80-2f8c09201073/b0b79f25-f97d-4d59-ae80-2f8c09201073.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.250017] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58b869df-c98e-4f75-992d-232d3649ce15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.253244] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.263111] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 834.263111] env[68244]: value = "task-2780336" [ 834.263111] env[68244]: _type = "Task" [ 834.263111] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.267992] env[68244]: DEBUG nova.network.neutron [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Updating instance_info_cache with network_info: [{"id": "87739e1f-7578-4f51-abbc-678119d483ee", "address": "fa:16:3e:2c:33:e9", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87739e1f-75", "ovs_interfaceid": "87739e1f-7578-4f51-abbc-678119d483ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.274699] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780336, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.282843] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.283112] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.283280] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.283468] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.283623] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.283777] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.283996] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.284182] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 834.284358] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.284528] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.284710] env[68244]: DEBUG nova.virt.hardware [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.285799] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d74b94-a0b5-4895-aeb6-437460684eb2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.293345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd6ed32-268e-4b1a-a4e9-56494883b3bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.400726] env[68244]: ERROR nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [req-f673f8b9-cc29-430d-8a44-afa222ad37a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f673f8b9-cc29-430d-8a44-afa222ad37a4"}]} [ 834.423425] env[68244]: DEBUG nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 834.443702] env[68244]: DEBUG nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 834.443927] env[68244]: DEBUG nova.compute.provider_tree [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.464455] env[68244]: DEBUG nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 834.485403] env[68244]: DEBUG nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 834.499162] env[68244]: DEBUG nova.compute.manager [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.499162] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.499162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf6f829-eda4-4c50-bf6f-3a6ddc3f4f60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.511859] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780334, 'name': ReconfigVM_Task, 'duration_secs': 0.328382} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.515057] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 340aa1e7-dc0a-4cba-8979-0c591830e9db/340aa1e7-dc0a-4cba-8979-0c591830e9db.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.516295] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.516776] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be221a5c-6a98-49d7-9b77-b7ea403c6201 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.518919] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0d72369-b1fb-49a4-99d8-c9b11bbc234b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.531947] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 834.531947] env[68244]: value = "task-2780337" [ 834.531947] env[68244]: _type = "Task" [ 834.531947] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.533456] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 834.533456] env[68244]: value = "task-2780338" [ 834.533456] env[68244]: _type = "Task" [ 834.533456] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.550560] env[68244]: DEBUG nova.compute.manager [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Received event network-vif-plugged-7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 834.550718] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] Acquiring lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.550905] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.551191] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.551315] env[68244]: DEBUG nova.compute.manager [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] No waiting events found dispatching network-vif-plugged-7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.551560] env[68244]: WARNING nova.compute.manager [req-4a084cf6-d6bb-4d3c-9cbc-d796bafafe0d req-d601eba6-13b6-49b3-8995-b112b7cb6142 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Received unexpected event network-vif-plugged-7179ec75-5350-44a8-97c4-c2a8d408a496 for instance with vm_state building and task_state spawning. [ 834.552217] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780335, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.558456] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780337, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.562181] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780338, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.588833] env[68244]: DEBUG nova.compute.manager [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.589339] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.590950] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3deef23-5597-4bd7-82cf-368fcdbb102b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.602306] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.602597] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e49664c6-0cfd-4167-a37d-df913f4936e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.609598] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 834.609598] env[68244]: value = "task-2780339" [ 834.609598] env[68244]: _type = "Task" [ 834.609598] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.621185] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780339, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.654929] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Successfully updated port: 7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.744054] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Releasing lock "refresh_cache-828865d7-d06a-4683-9149-987e6d9efbd9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.744534] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Received event network-vif-plugged-87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 834.744879] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Acquiring lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.745113] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.745773] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.745773] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] No waiting events found dispatching network-vif-plugged-87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 834.745982] env[68244]: WARNING nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Received unexpected event network-vif-plugged-87739e1f-7578-4f51-abbc-678119d483ee for instance with vm_state building and task_state spawning. 
[ 834.745982] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Received event network-changed-87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 834.746208] env[68244]: DEBUG nova.compute.manager [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Refreshing instance network info cache due to event network-changed-87739e1f-7578-4f51-abbc-678119d483ee. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 834.746382] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Acquiring lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.770244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.770562] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Instance network_info: |[{"id": "87739e1f-7578-4f51-abbc-678119d483ee", "address": "fa:16:3e:2c:33:e9", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87739e1f-75", "ovs_interfaceid": "87739e1f-7578-4f51-abbc-678119d483ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 834.770867] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Acquired lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.771178] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Refreshing network 
info cache for port 87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.772269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:33:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c708997-9b6e-4c27-8a58-02c0d1359d5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87739e1f-7578-4f51-abbc-678119d483ee', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.780228] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.784249] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.784538] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780336, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492274} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.788750] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-854daa31-138d-442c-96f5-a7d680fc0548 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.804548] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b0b79f25-f97d-4d59-ae80-2f8c09201073/b0b79f25-f97d-4d59-ae80-2f8c09201073.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.804818] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.805735] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f454dbe-a96c-41e2-90cc-36caacefb912 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.812815] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.812815] env[68244]: value = "task-2780340" [ 834.812815] env[68244]: _type = "Task" [ 834.812815] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.822442] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 834.822442] env[68244]: value = "task-2780341" [ 834.822442] env[68244]: _type = "Task" [ 834.822442] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.830697] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780340, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.835797] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780341, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.036082] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780335, 'name': CreateSnapshot_Task, 'duration_secs': 0.813953} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.044527] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 835.045640] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60a0331-5105-4886-a11c-5a993faae3d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.052902] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780337, 'name': Rename_Task, 'duration_secs': 0.278626} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.059056] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.059375] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780338, 'name': PowerOffVM_Task, 'duration_secs': 0.432328} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.064138] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ad4718d-f8cc-404a-b792-f0765717e941 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.065979] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.065979] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.066589] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25c1c817-bc4b-43de-9f44-33975aed62bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.075852] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 835.075852] env[68244]: value = "task-2780342" [ 835.075852] env[68244]: _type = "Task" [ 835.075852] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.085278] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780342, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.106631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc8c5ff-7ca4-4725-9ebe-a9418b66b31b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.117837] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c78709-0f29-4091-b183-bc422b85a0c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.127019] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780339, 'name': PowerOffVM_Task, 'duration_secs': 0.293514} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.153437] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.153675] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.155712] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58c9b01e-5282-421e-9ecd-61890b70898e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.157927] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621534de-7a38-4209-b1c4-8717e7a2a7b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.160624] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.160827] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.161066] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleting the datastore file [datastore2] 086dda59-4bd2-4ca2-a758-c120f1271f42 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.161570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.161759] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.161836] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Building network info 
cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.162914] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-facf63eb-be26-4492-9b39-91e6fb32838b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.182654] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c72dec6-f1c1-4bee-8b21-26ac006d0cf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.187604] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 835.187604] env[68244]: value = "task-2780345" [ 835.187604] env[68244]: _type = "Task" [ 835.187604] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.200947] env[68244]: DEBUG nova.compute.provider_tree [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 835.207958] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.258362] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.258676] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.258797] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleting the datastore file [datastore2] 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.259050] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32382fc2-ab82-4b4e-841f-0115082b3161 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.265061] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for the task: (returnval){ [ 835.265061] env[68244]: value = "task-2780346" [ 835.265061] env[68244]: _type = "Task" [ 835.265061] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.273373] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.324770] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780340, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.332659] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780341, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079574} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.333369] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.333766] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0af304e-6e9e-465d-b6a6-034f2dd3722c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.358137] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] b0b79f25-f97d-4d59-ae80-2f8c09201073/b0b79f25-f97d-4d59-ae80-2f8c09201073.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.360896] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-935a4627-84ac-47b0-8653-175b64a47348 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.379864] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 835.379864] env[68244]: value = "task-2780347" [ 835.379864] env[68244]: _type = "Task" [ 835.379864] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.387566] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.554863] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Updated VIF entry in instance network info cache for port 87739e1f-7578-4f51-abbc-678119d483ee. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.555157] env[68244]: DEBUG nova.network.neutron [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Updating instance_info_cache with network_info: [{"id": "87739e1f-7578-4f51-abbc-678119d483ee", "address": "fa:16:3e:2c:33:e9", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87739e1f-75", "ovs_interfaceid": "87739e1f-7578-4f51-abbc-678119d483ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.574562] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 835.575326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fce8d942-cda5-4781-92c3-e53896cb6feb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.593691] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780342, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.595250] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 835.595250] env[68244]: value = "task-2780348" [ 835.595250] env[68244]: _type = "Task" [ 835.595250] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.603486] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780348, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.697744] env[68244]: DEBUG oslo_vmware.api [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192478} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.698096] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.698238] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.698406] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.698582] env[68244]: INFO nova.compute.manager [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Took 1.20 seconds to destroy the instance on the hypervisor. [ 835.698873] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.699707] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.701611] env[68244]: DEBUG nova.compute.manager [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.701718] env[68244]: DEBUG nova.network.neutron [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.739550] env[68244]: DEBUG nova.scheduler.client.report [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 74 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 835.739912] env[68244]: DEBUG nova.compute.provider_tree [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 74 to 75 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 835.740151] env[68244]: DEBUG nova.compute.provider_tree [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 835.774168] env[68244]: DEBUG oslo_vmware.api [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Task: {'id': task-2780346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163006} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.777920] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.777920] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.777920] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.778107] env[68244]: INFO nova.compute.manager [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 835.778641] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.778787] env[68244]: DEBUG nova.compute.manager [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.778900] env[68244]: DEBUG nova.network.neutron [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.825779] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780340, 'name': CreateVM_Task, 'duration_secs': 0.641144} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.825985] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.826631] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.826815] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.827285] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 835.827786] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa1fa943-a2e4-4495-8880-5a0a83248787 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.832041] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 835.832041] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527198bd-6ce3-5ba0-6560-8e6fc818994e" [ 835.832041] env[68244]: _type = "Task" [ 835.832041] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.840028] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527198bd-6ce3-5ba0-6560-8e6fc818994e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.847828] env[68244]: DEBUG nova.network.neutron [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Updating instance_info_cache with network_info: [{"id": "7179ec75-5350-44a8-97c4-c2a8d408a496", "address": "fa:16:3e:7a:1e:54", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7179ec75-53", "ovs_interfaceid": "7179ec75-5350-44a8-97c4-c2a8d408a496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.890667] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.058396] env[68244]: DEBUG oslo_concurrency.lockutils [req-7f0c96be-86ae-4ee9-9426-281904f5b16c req-cf586c4b-2d52-4c4f-84a4-92a4576ee0f2 service nova] Releasing lock "refresh_cache-085b318d-e704-46f9-89a6-679b8aa49f85" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.086976] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780342, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.106308] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780348, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.247742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.011s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.248301] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.251513] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.742s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.251749] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.253865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.534s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.255679] env[68244]: INFO nova.compute.claims [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.281245] env[68244]: INFO nova.scheduler.client.report [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleted allocations for instance 3776b39a-d10b-4068-8b4b-5dc25798e088 [ 836.315124] env[68244]: DEBUG nova.compute.manager [req-ba87a57b-5d42-465b-970e-406fa429689d req-ac9591c6-5637-4d97-b25b-b5b17fe5e5db service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Received event network-vif-deleted-060a13a5-3b77-45b8-9522-05b2eb9e0e12 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 836.315342] env[68244]: INFO nova.compute.manager [req-ba87a57b-5d42-465b-970e-406fa429689d req-ac9591c6-5637-4d97-b25b-b5b17fe5e5db service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Neutron deleted interface 060a13a5-3b77-45b8-9522-05b2eb9e0e12; detaching it from the instance and deleting 
it from the info cache [ 836.315517] env[68244]: DEBUG nova.network.neutron [req-ba87a57b-5d42-465b-970e-406fa429689d req-ac9591c6-5637-4d97-b25b-b5b17fe5e5db service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.343336] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527198bd-6ce3-5ba0-6560-8e6fc818994e, 'name': SearchDatastore_Task, 'duration_secs': 0.024692} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.343720] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.343903] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.344139] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.344283] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.344459] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 836.344987] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93f9dc99-ce92-4e91-b34f-1bd4f7499206 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.352557] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.352818] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Instance network_info: |[{"id": "7179ec75-5350-44a8-97c4-c2a8d408a496", "address": "fa:16:3e:7a:1e:54", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7179ec75-53", "ovs_interfaceid": "7179ec75-5350-44a8-97c4-c2a8d408a496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 836.354529] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:1e:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c708997-9b6e-4c27-8a58-02c0d1359d5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7179ec75-5350-44a8-97c4-c2a8d408a496', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.362610] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.362610] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 836.362704] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 836.363670] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.363862] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-679d1586-604a-4038-a362-7dcbfda51890 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.366757] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-557833e8-5f9e-4c95-8882-25ec8d01510b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.387713] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 836.387713] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527addde-e6fa-adfa-84cc-129b314cfd33" [ 836.387713] env[68244]: _type = "Task" [ 836.387713] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.394946] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.394946] env[68244]: value = "task-2780349" [ 836.394946] env[68244]: _type = "Task" [ 836.394946] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.396024] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780347, 'name': ReconfigVM_Task, 'duration_secs': 0.625215} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.396024] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Reconfigured VM instance instance-0000002b to attach disk [datastore2] b0b79f25-f97d-4d59-ae80-2f8c09201073/b0b79f25-f97d-4d59-ae80-2f8c09201073.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.399310] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a61562ca-f176-45cc-a6ea-b8a7bf4e2974 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.404101] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527addde-e6fa-adfa-84cc-129b314cfd33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.409801] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780349, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.411026] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 836.411026] env[68244]: value = "task-2780350" [ 836.411026] env[68244]: _type = "Task" [ 836.411026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.419538] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780350, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.576134] env[68244]: DEBUG nova.compute.manager [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Received event network-changed-7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 836.576385] env[68244]: DEBUG nova.compute.manager [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Refreshing instance network info cache due to event network-changed-7179ec75-5350-44a8-97c4-c2a8d408a496. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 836.576643] env[68244]: DEBUG oslo_concurrency.lockutils [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] Acquiring lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.576797] env[68244]: DEBUG oslo_concurrency.lockutils [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] Acquired lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.576994] env[68244]: DEBUG nova.network.neutron [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Refreshing network info cache for port 7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.588863] env[68244]: DEBUG oslo_vmware.api [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780342, 'name': PowerOnVM_Task, 'duration_secs': 1.070328} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.589835] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.590103] env[68244]: INFO nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Took 10.44 seconds to spawn the instance on the hypervisor. [ 836.590332] env[68244]: DEBUG nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.591151] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa45170-3f60-48a6-a314-0d1f4075c902 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.609703] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780348, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.707032] env[68244]: DEBUG nova.network.neutron [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.753357] env[68244]: DEBUG nova.compute.utils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.755103] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.755362] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.775776] env[68244]: DEBUG nova.network.neutron [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.789428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f1199efb-2b83-4eb3-8699-f051fa451c9b tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "3776b39a-d10b-4068-8b4b-5dc25798e088" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.848s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.814128] env[68244]: DEBUG nova.policy [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '918658919a8c4d4e888f9a63053e5ffe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e80cfa81cd442f9af3bf027b9059123', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.818890] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29ddfc92-f115-4cc9-a8dd-edd0e76a749b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.827915] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb5e932-39d3-42d9-b588-ab9ff639143a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.862015] env[68244]: DEBUG nova.compute.manager [req-ba87a57b-5d42-465b-970e-406fa429689d req-ac9591c6-5637-4d97-b25b-b5b17fe5e5db service nova] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Detach interface failed, port_id=060a13a5-3b77-45b8-9522-05b2eb9e0e12, reason: Instance 086dda59-4bd2-4ca2-a758-c120f1271f42 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 836.898683] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527addde-e6fa-adfa-84cc-129b314cfd33, 'name': SearchDatastore_Task, 'duration_secs': 0.014551} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.904532] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b8bcbc6-2f95-4f61-9f75-fbe9ccd0c26a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.913826] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780349, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.918097] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 836.918097] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b132b2-39ea-53a3-6ee8-8b640e147469" [ 836.918097] env[68244]: _type = "Task" [ 836.918097] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.924525] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780350, 'name': Rename_Task, 'duration_secs': 0.137633} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.925152] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.925398] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2be62764-d6bd-4eb1-b129-d6ed75244083 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.930518] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b132b2-39ea-53a3-6ee8-8b640e147469, 'name': SearchDatastore_Task, 'duration_secs': 0.012177} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.931138] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.931943] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 085b318d-e704-46f9-89a6-679b8aa49f85/085b318d-e704-46f9-89a6-679b8aa49f85.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.931943] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16fc7d1b-7403-4b5f-a23e-102f29ddeb46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.934967] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 836.934967] env[68244]: value = "task-2780351" [ 836.934967] env[68244]: _type = "Task" [ 836.934967] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.939699] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 836.939699] env[68244]: value = "task-2780352" [ 836.939699] env[68244]: _type = "Task" [ 836.939699] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.945809] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780351, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.950694] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.112018] env[68244]: INFO nova.compute.manager [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Took 63.57 seconds to build instance. 
[ 837.117608] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780348, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.174377] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Successfully created port: 62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.209037] env[68244]: INFO nova.compute.manager [-] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Took 1.51 seconds to deallocate network for instance. [ 837.259104] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.278359] env[68244]: INFO nova.compute.manager [-] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Took 1.50 seconds to deallocate network for instance. [ 837.417937] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780349, 'name': CreateVM_Task, 'duration_secs': 0.559873} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.417937] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 837.418376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.418421] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.418753] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 837.419044] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-728c6eb8-32a2-4c8a-8b13-5993badc73b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.429228] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d 
tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 837.429228] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0f6ab-1bb7-1130-5d0f-608ca47905e2" [ 837.429228] env[68244]: _type = "Task" [ 837.429228] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.438034] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0f6ab-1bb7-1130-5d0f-608ca47905e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.458257] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780351, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.464944] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780352, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.612719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-63c960c3-1cb0-48cc-8a6f-c81a29c161ad tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.269s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.613229] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780348, 'name': CloneVM_Task, 'duration_secs': 1.61559} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.614266] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Created linked-clone VM from snapshot [ 837.615629] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2ed331-34d7-484f-924a-5c942c546392 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.627836] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Uploading image e7140df0-f9b6-40bd-8576-e37095b62021 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 837.664422] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 837.664422] env[68244]: value = "vm-559007" [ 837.664422] env[68244]: _type = "VirtualMachine" [ 837.664422] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 837.664832] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ccfadca3-424f-4af4-81ef-bdb47923a06e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.682623] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease: (returnval){ [ 837.682623] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ab665-610e-8153-97d2-5b8535ae00ad" [ 837.682623] env[68244]: _type = "HttpNfcLease" [ 837.682623] env[68244]: } obtained for exporting VM: (result){ [ 837.682623] env[68244]: value = "vm-559007" [ 837.682623] env[68244]: _type = "VirtualMachine" [ 837.682623] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 837.682623] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the lease: (returnval){ [ 837.682623] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ab665-610e-8153-97d2-5b8535ae00ad" [ 837.682623] env[68244]: _type = "HttpNfcLease" [ 837.682623] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 837.694662] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 837.694662] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ab665-610e-8153-97d2-5b8535ae00ad" [ 837.694662] env[68244]: _type = "HttpNfcLease" [ 837.694662] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 837.715336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.785502] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.843577] env[68244]: DEBUG nova.network.neutron [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Updated VIF entry in instance network info cache for port 7179ec75-5350-44a8-97c4-c2a8d408a496. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.843939] env[68244]: DEBUG nova.network.neutron [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Updating instance_info_cache with network_info: [{"id": "7179ec75-5350-44a8-97c4-c2a8d408a496", "address": "fa:16:3e:7a:1e:54", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7179ec75-53", "ovs_interfaceid": "7179ec75-5350-44a8-97c4-c2a8d408a496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.538033] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.539887] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.542146] env[68244]: DEBUG oslo_concurrency.lockutils [req-73b19ab8-11bc-49b9-a7a3-31c7794c7183 req-c3af3586-f1f8-422b-b260-55b51c2dc6a5 service nova] Releasing lock "refresh_cache-6915d271-8346-41b5-a75b-2188fd3b57d1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.544977] env[68244]: DEBUG nova.compute.manager [req-5c09d38c-c874-4301-8469-7db4c88ebe91 req-864f9e65-2b26-422d-9615-6e8304e51306 service nova] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Received event network-vif-deleted-61c4e228-79c7-4531-bcb1-6cf1bed2010e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 838.545804] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f67b8c1-16b7-4cac-abab-33829303d5d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.578069] env[68244]: DEBUG oslo_vmware.api [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780351, 'name': PowerOnVM_Task, 'duration_secs': 0.846326} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.578393] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c0f6ab-1bb7-1130-5d0f-608ca47905e2, 'name': SearchDatastore_Task, 'duration_secs': 0.025414} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.580982] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.581200] env[68244]: INFO nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Took 9.71 seconds to spawn the instance on the hypervisor. 
[ 838.581375] env[68244]: DEBUG nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.582370] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c4d9e0-85ca-4b89-bf38-4d63c5933172 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.585685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.585900] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.586141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.586293] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.586472] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.591658] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303e1326-e110-442a-ae2a-3ef965b80fe3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.594207] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08918f0b-e201-4806-82fd-cffc53e5387e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.596454] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 838.596454] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ab665-610e-8153-97d2-5b8535ae00ad" [ 838.596454] env[68244]: _type = "HttpNfcLease" [ 838.596454] env[68244]: } is ready. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 838.596454] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552997} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.598453] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.598670] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.598824] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.599014] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.599167] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.599313] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.599518] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.599674] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.599837] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.599996] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.600185] env[68244]: DEBUG nova.virt.hardware [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.600819] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 838.600819] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ab665-610e-8153-97d2-5b8535ae00ad" [ 838.600819] env[68244]: _type = "HttpNfcLease" [ 838.600819] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 838.601049] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 085b318d-e704-46f9-89a6-679b8aa49f85/085b318d-e704-46f9-89a6-679b8aa49f85.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.601250] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.602196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485cbe5b-3ae5-4791-ba61-04751b01ef8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.628809] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ea8b3e-e991-47e0-81a3-9ea788d3b703 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.631390] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5bf5487-8a76-4b1b-a47f-e470b942bb23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.637342] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64976048-a2f1-489e-b729-34f39138a240 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.645462] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.645638] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.650760] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b38fd8-4879-4ad1-aeeb-fb4f8dc91aa1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.654574] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54b260b6-634c-4631-b12b-47997d4e9831 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.656961] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 838.658440] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 838.661900] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 838.661900] env[68244]: value = "task-2780354" [ 838.661900] env[68244]: _type = "Task" [ 838.661900] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.664812] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba46f66-3d1c-4b58-a7fc-fb5fe719e05e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.734324] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 838.734324] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5298ee4e-7b6e-46c5-1b9a-e30ddb4a94a5" [ 838.734324] env[68244]: _type = "Task" [ 838.734324] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.751961] env[68244]: DEBUG nova.compute.provider_tree [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.753306] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065127} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.753910] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 838.754987] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b842d416-981c-4dc1-9085-2ac19a79d588 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.760848] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5298ee4e-7b6e-46c5-1b9a-e30ddb4a94a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01989} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.761955] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18bb0022-dc37-444c-af27-d261e21c570a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.784933] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 085b318d-e704-46f9-89a6-679b8aa49f85/085b318d-e704-46f9-89a6-679b8aa49f85.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.789025] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb9b6149-859f-467a-84a1-23fa117caf23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.802135] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 838.802135] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bc307-1711-7ac6-b7ab-663138e55503" [ 838.802135] env[68244]: _type = "Task" [ 838.802135] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.810622] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bc307-1711-7ac6-b7ab-663138e55503, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.811876] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 838.811876] env[68244]: value = "task-2780355" [ 838.811876] env[68244]: _type = "Task" [ 838.811876] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.850094] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bf3b51d5-3500-478f-aefd-c9590023a126 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.052960] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.053396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.053713] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.054284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.054284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.062567] env[68244]: INFO nova.compute.manager [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Terminating instance [ 839.082388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 
tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.159157] env[68244]: INFO nova.compute.manager [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Took 59.59 seconds to build instance. [ 839.255839] env[68244]: DEBUG nova.scheduler.client.report [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.322720] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521bc307-1711-7ac6-b7ab-663138e55503, 'name': SearchDatastore_Task, 'duration_secs': 0.030139} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.324768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.325375] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 6915d271-8346-41b5-a75b-2188fd3b57d1/6915d271-8346-41b5-a75b-2188fd3b57d1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.328648] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd1f75dc-5992-4fd3-bde6-908a9b5d7448 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.331531] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780355, 'name': ReconfigVM_Task, 'duration_secs': 0.368221} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.331531] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 085b318d-e704-46f9-89a6-679b8aa49f85/085b318d-e704-46f9-89a6-679b8aa49f85.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.332516] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c163c0d-098c-440f-83bd-44dc7f21b4e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.337594] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 839.337594] env[68244]: value = "task-2780356" [ 839.337594] env[68244]: _type = "Task" [ 839.337594] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.342890] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 839.342890] env[68244]: value = "task-2780357" [ 839.342890] env[68244]: _type = "Task" [ 839.342890] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.352097] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.357037] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780357, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.518353] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Successfully updated port: 62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.567059] env[68244]: DEBUG nova.compute.manager [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 839.568055] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 839.568886] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffbd83d-efec-4b56-b8f1-9105f81b5166 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.576991] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.577326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f522b89d-b642-450e-bb84-ff5d606a643a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.583359] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 839.583359] env[68244]: value = "task-2780358" [ 839.583359] env[68244]: _type = "Task" [ 839.583359] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.591419] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.661645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-db3d3368-d6ca-4172-88a3-144837ae80db tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.311s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.761494] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.507s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.762107] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 839.765229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.625s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.766724] env[68244]: INFO nova.compute.claims [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.847559] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780356, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.856644] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780357, 'name': Rename_Task, 'duration_secs': 0.14947} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.856928] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.857228] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47cdd413-075f-4acb-a2bb-79d5360a3338 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.863874] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 839.863874] env[68244]: value = "task-2780359" [ 839.863874] env[68244]: _type = "Task" [ 839.863874] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.872324] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.022787] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.022963] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.023148] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.093716] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.164812] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.273695] env[68244]: DEBUG nova.compute.utils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 840.275618] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 840.275789] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.331794] env[68244]: DEBUG nova.policy [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7dc24a60ae364e9a991bac51bd3ba9d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec5340c2b2a440d0ad5a75fd694ad71a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 840.349155] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780356, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.378231] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780359, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.463190] env[68244]: DEBUG nova.compute.manager [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Received event network-vif-plugged-62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.463190] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Acquiring lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.463190] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.463190] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.463604] env[68244]: DEBUG nova.compute.manager [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] No waiting events found dispatching network-vif-plugged-62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.464050] env[68244]: WARNING nova.compute.manager [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Received unexpected event network-vif-plugged-62558620-9882-40c5-888d-85620dd9c6d9 for instance with vm_state building and task_state spawning. [ 840.464336] env[68244]: DEBUG nova.compute.manager [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Received event network-changed-62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.464706] env[68244]: DEBUG nova.compute.manager [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Refreshing instance network info cache due to event network-changed-62558620-9882-40c5-888d-85620dd9c6d9. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 840.465068] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Acquiring lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.590130] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.597497] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.689262] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.776250] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 840.852900] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780356, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.336133} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.852900] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 6915d271-8346-41b5-a75b-2188fd3b57d1/6915d271-8346-41b5-a75b-2188fd3b57d1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.853381] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.853381] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff06394b-2187-4c69-8ed7-8754698b2138 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.860774] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 840.860774] env[68244]: value = "task-2780360" [ 840.860774] env[68244]: _type = "Task" [ 840.860774] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.874270] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780360, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.880589] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780359, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.980354] env[68244]: DEBUG nova.network.neutron [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Updating instance_info_cache with network_info: [{"id": "62558620-9882-40c5-888d-85620dd9c6d9", "address": "fa:16:3e:95:38:27", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62558620-98", "ovs_interfaceid": "62558620-9882-40c5-888d-85620dd9c6d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.080175] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Successfully created port: cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.096993] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780358, 'name': PowerOffVM_Task, 'duration_secs': 1.093681} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.097317] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.097499] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 841.097853] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f4fe79f-59b0-4b8f-aa68-4ee201b110a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.170217] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 841.170466] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 841.174018] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Deleting the datastore file [datastore2] 340aa1e7-dc0a-4cba-8979-0c591830e9db {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.174018] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-102bb34f-b6b2-4f2d-a2cd-c3ff73af0ce1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.179469] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for the task: (returnval){ [ 841.179469] env[68244]: value = "task-2780362" [ 841.179469] env[68244]: _type = "Task" [ 841.179469] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.187270] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.366956] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fc3937-cb43-4acf-897e-534afd812cd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.378741] env[68244]: DEBUG oslo_vmware.api [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780359, 'name': PowerOnVM_Task, 'duration_secs': 1.173906} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.382959] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.383200] env[68244]: INFO nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Took 9.83 seconds to spawn the instance on the hypervisor. [ 841.383383] env[68244]: DEBUG nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.383699] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067047} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.384392] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38cecd5-6d3f-424b-9fff-6c46f80d4a93 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.386781] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.388594] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db92e7c-f939-443f-a000-bb64d91eedff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.392258] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71669e78-c64c-444e-bce3-27e0126c915e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.439826] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 6915d271-8346-41b5-a75b-2188fd3b57d1/6915d271-8346-41b5-a75b-2188fd3b57d1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.441450] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1065d75f-6eff-4584-a155-d9336de89c26 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.443961] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8399f38e-43a4-40da-8f2a-8094b773a9af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.463362] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaccae6-3a8f-45a6-87d5-01e240b2a7c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.468520] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 841.468520] env[68244]: value = "task-2780363" [ 841.468520] env[68244]: _type = "Task" [ 841.468520] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.479489] env[68244]: DEBUG nova.compute.provider_tree [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.485869] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.486039] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Instance network_info: |[{"id": "62558620-9882-40c5-888d-85620dd9c6d9", "address": "fa:16:3e:95:38:27", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62558620-98", "ovs_interfaceid": "62558620-9882-40c5-888d-85620dd9c6d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 841.486329] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780363, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.486623] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Acquired lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.486819] env[68244]: DEBUG nova.network.neutron [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Refreshing network info cache for port 62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.487867] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:38:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62558620-9882-40c5-888d-85620dd9c6d9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.497294] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 841.497294] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.497294] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddc0dea6-73d4-4524-b214-38f9fc929ec6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.516502] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.516502] env[68244]: value = "task-2780364" [ 841.516502] env[68244]: _type = "Task" [ 841.516502] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.525895] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780364, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.690332] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.791353] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.815049] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.815331] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.815493] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.815674] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.815815] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.815957] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.816179] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 841.816578] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.816578] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.816728] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 841.816802] env[68244]: DEBUG nova.virt.hardware [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.817694] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992afd43-7416-4ea4-a1f7-09a7b1ff33a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.825447] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcbc0d0-1031-4e48-953d-c87a564eb4b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.947735] env[68244]: INFO nova.compute.manager [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Took 59.14 seconds to build instance. [ 841.980607] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.982633] env[68244]: DEBUG nova.scheduler.client.report [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.027683] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780364, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.193992] env[68244]: DEBUG oslo_vmware.api [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Task: {'id': task-2780362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606359} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.194265] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.194450] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.194622] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.194797] env[68244]: INFO nova.compute.manager [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Took 2.63 seconds to destroy the instance on the hypervisor. [ 842.195055] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.195910] env[68244]: DEBUG nova.compute.manager [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 842.195910] env[68244]: DEBUG nova.network.neutron [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 842.302593] env[68244]: DEBUG nova.network.neutron [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Updated VIF entry in instance network info cache for port 62558620-9882-40c5-888d-85620dd9c6d9. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.303616] env[68244]: DEBUG nova.network.neutron [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Updating instance_info_cache with network_info: [{"id": "62558620-9882-40c5-888d-85620dd9c6d9", "address": "fa:16:3e:95:38:27", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62558620-98", "ovs_interfaceid": "62558620-9882-40c5-888d-85620dd9c6d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.450043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-507b3490-90ff-4d31-a28b-8bd67bb252c6 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.266s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.464890] env[68244]: DEBUG nova.compute.manager [req-19954b9e-162a-4b0e-b3b9-8be0699973b6 req-7256eadc-e1dd-4fe6-8576-33e1ca6106aa service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Received event network-vif-deleted-e3259135-2e40-4236-8149-b4172ef87318 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 842.465173] env[68244]: INFO nova.compute.manager [req-19954b9e-162a-4b0e-b3b9-8be0699973b6 req-7256eadc-e1dd-4fe6-8576-33e1ca6106aa service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Neutron deleted interface e3259135-2e40-4236-8149-b4172ef87318; detaching it from the instance and deleting it from the info cache [ 842.465415] env[68244]: DEBUG nova.network.neutron [req-19954b9e-162a-4b0e-b3b9-8be0699973b6 req-7256eadc-e1dd-4fe6-8576-33e1ca6106aa service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.480022] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780363, 'name': ReconfigVM_Task, 'duration_secs': 0.776565} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.480300] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 6915d271-8346-41b5-a75b-2188fd3b57d1/6915d271-8346-41b5-a75b-2188fd3b57d1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.480921] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-460d3920-d3cf-44a2-bdaf-3395990b1f67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.488119] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.488589] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 842.491142] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 842.491142] env[68244]: value = "task-2780365" [ 842.491142] env[68244]: _type = "Task" [ 842.491142] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.491571] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.080s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.491753] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.493757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.699s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.493946] env[68244]: DEBUG nova.objects.instance [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 842.505634] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780365, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.524473] env[68244]: INFO nova.scheduler.client.report [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Deleted allocations for instance 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c [ 842.534354] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780364, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.625367] env[68244]: DEBUG nova.compute.manager [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received event network-vif-plugged-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 842.625594] env[68244]: DEBUG oslo_concurrency.lockutils [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] Acquiring lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.625793] env[68244]: DEBUG oslo_concurrency.lockutils [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.625987] env[68244]: DEBUG oslo_concurrency.lockutils [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.626444] env[68244]: DEBUG nova.compute.manager [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] No waiting events found dispatching network-vif-plugged-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.626689] env[68244]: WARNING nova.compute.manager [req-77a1d5e1-314e-4985-924b-64458a47f96a req-907b0218-5a87-482b-b43a-a8752e48cd86 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received unexpected event network-vif-plugged-cb5af382-be2b-47cb-8edb-e23ce944a42d for instance with vm_state building and task_state spawning. 
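The lock messages threaded through the records above ("acquired by ... waited N.NNNs", '"released" ... held N.NNNs') are emitted by oslo.concurrency's named-lock helpers, which Nova uses here to serialize resource claims and per-instance event handling. The following is only an illustrative sketch of that pattern, not code from this log; the function bodies and lock names are placeholders.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named internal semaphore held; concurrent callers
        # queue up, which is what the "waited N.NNNs" figures measure.
        ...

    def pop_instance_event(instance_uuid, event_name):
        # Equivalent explicit form using the context manager; the time the
        # block takes is what the '"released" ... held N.NNNs' lines report.
        with lockutils.lock('%s-events' % instance_uuid):
            ...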
[ 842.777699] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Successfully updated port: cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.806545] env[68244]: DEBUG oslo_concurrency.lockutils [req-c62316ae-0480-4f98-ab74-11f76da21a0e req-03ae9bd9-420a-478c-ac5c-8c69dab98453 service nova] Releasing lock "refresh_cache-2d9dbf75-992d-4932-bd5d-84462494ebe8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.950582] env[68244]: DEBUG nova.network.neutron [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.952042] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 842.968024] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e690a2e-dfe5-468c-a0ee-9906c7706b21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.977894] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81002c70-932d-47cd-8852-02def030f11a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.992718] env[68244]: DEBUG nova.compute.utils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 842.994099] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.994895] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.017963] env[68244]: DEBUG nova.compute.manager [req-19954b9e-162a-4b0e-b3b9-8be0699973b6 req-7256eadc-e1dd-4fe6-8576-33e1ca6106aa service nova] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Detach interface failed, port_id=e3259135-2e40-4236-8149-b4172ef87318, reason: Instance 340aa1e7-dc0a-4cba-8979-0c591830e9db could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 843.025328] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780365, 'name': Rename_Task, 'duration_secs': 0.247422} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.025942] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.025942] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41159a06-02f6-44c5-9e13-0e9ddab7f63e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.030796] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780364, 'name': CreateVM_Task, 'duration_secs': 1.391307} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.031456] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.032740] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.032920] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.033260] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 843.033508] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea602e6f-0600-4ee3-bcb7-da00bc0b47e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.038151] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 843.038151] env[68244]: value = "task-2780366" [ 843.038151] env[68244]: _type = "Task" [ 843.038151] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.038888] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bcf47a03-0577-4f38-b02c-59a0e2e7b594 tempest-VolumesAssistedSnapshotsTest-1916475659 tempest-VolumesAssistedSnapshotsTest-1916475659-project-member] Lock "8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.827s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.044886] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 843.044886] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523adac8-82d3-b704-3246-599da5f80713" [ 843.044886] env[68244]: _type = "Task" [ 843.044886] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.052522] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.061779] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523adac8-82d3-b704-3246-599da5f80713, 'name': SearchDatastore_Task, 'duration_secs': 0.012847} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.062155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.062405] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.062677] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.062823] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.062995] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.063274] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88b5d149-2b48-4d1b-bf6f-881b5ad72cda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.072120] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.072389] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.073347] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8cd6565-1021-4be8-92ef-dd8a8b589dab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.079763] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 843.079763] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c99ab0-122a-f656-a6b4-17ef9d6079bd" [ 843.079763] env[68244]: _type = "Task" [ 843.079763] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.087754] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c99ab0-122a-f656-a6b4-17ef9d6079bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.089323] env[68244]: DEBUG nova.policy [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 843.280571] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.280721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.281081] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.455558] env[68244]: INFO nova.compute.manager [-] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Took 1.26 seconds to deallocate network for instance. 
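The entries above and below repeat a single pattern: a vCenter task handle (CreateVM_Task, Rename_Task, PowerOnVM_Task, SearchDatastore_Task, ...) is polled at an interval, its progress percentage is logged, and once it reaches a terminal state the wrapper logs "completed successfully" together with the measured duration_secs. The sketch below is only a minimal, self-contained illustration of that polling loop as it appears in this trace; TaskInfo, poll_task and the simulated state sequence are hypothetical stand-ins written for this note, not oslo.vmware's actual classes or call signatures.

```python
# Minimal sketch (assumptions only) of the task-polling pattern visible in this
# trace: poll a task handle, log progress, stop on a terminal state.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:              # hypothetical stand-in, not a vSphere/oslo type
    task_id: str             # e.g. "task-2780366"
    name: str                # e.g. "PowerOnVM_Task"
    state: str               # "running", "success" or "error"
    progress: int            # percent complete
    duration_secs: float


def poll_task(fetch_info, interval=0.5):
    """Poll a task until it finishes, logging lines shaped like the trace above."""
    start = time.monotonic()
    while True:
        info = fetch_info()
        if info.state == "success":
            info.duration_secs = time.monotonic() - start
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}, "
                  f"'duration_secs': {info.duration_secs:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"{info.name} ({info.task_id}) failed")
        print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        time.sleep(interval)


# Example: simulate a task that reports 0% and 88% before succeeding,
# mirroring the PowerOnVM_Task progression logged for task-2780366.
states = iter([("running", 0), ("running", 88), ("success", 100)])
poll_task(lambda: TaskInfo("task-2780366", "PowerOnVM_Task", *next(states), 0.0),
          interval=0.01)
```

In the real service the callable would re-read the task state from vCenter; here it is faked so the sketch runs standalone.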
[ 843.499582] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.500141] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 843.518581] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eeb1850e-bf41-4543-9621-3e2f949fe27f tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.519696] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.583s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.522578] env[68244]: INFO nova.compute.claims [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.551204] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780366, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.560665] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Successfully created port: bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.592388] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c99ab0-122a-f656-a6b4-17ef9d6079bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011664} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.594226] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f216f4d-37c3-496b-8f3a-e1a5e65a3929 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.600879] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 843.600879] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b00cc7-aa31-dc42-e9ee-ee1ef15abc78" [ 843.600879] env[68244]: _type = "Task" [ 843.600879] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.612262] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b00cc7-aa31-dc42-e9ee-ee1ef15abc78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.839070] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.968468] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.052455] env[68244]: DEBUG oslo_vmware.api [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780366, 'name': PowerOnVM_Task, 'duration_secs': 0.754135} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.054177] env[68244]: DEBUG nova.network.neutron [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.055461] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.055710] env[68244]: INFO nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Took 9.80 seconds to spawn the instance on the hypervisor. [ 844.055903] env[68244]: DEBUG nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.057848] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329c7288-e127-44f7-94a1-c8549fe7ead2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.114262] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b00cc7-aa31-dc42-e9ee-ee1ef15abc78, 'name': SearchDatastore_Task, 'duration_secs': 0.01299} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.114601] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.114949] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2d9dbf75-992d-4932-bd5d-84462494ebe8/2d9dbf75-992d-4932-bd5d-84462494ebe8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.115606] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4e02560-82ce-4135-b6f1-8a228240777c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.126916] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 844.126916] env[68244]: value = "task-2780367" [ 844.126916] env[68244]: _type = "Task" [ 844.126916] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.134301] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780367, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.512868] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.536641] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.536834] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.536992] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.537192] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.537370] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.537597] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.537736] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.537915] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.538110] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 
tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.538309] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.538702] env[68244]: DEBUG nova.virt.hardware [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.539419] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a791d2-bef9-40a6-ac3e-0cf450e07afd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.548300] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea92793e-d191-4fc8-996e-10ff5de837c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.558302] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.558542] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Instance network_info: |[{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.567628] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 
774ce6f8-6273-4f2b-b398-ee8c44d79520] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:b9:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb5af382-be2b-47cb-8edb-e23ce944a42d', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.575204] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.583052] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.583642] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a272134-28c0-45ad-8b4e-91e3c3e25377 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.604860] env[68244]: INFO nova.compute.manager [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Took 54.67 seconds to build instance. [ 844.621153] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.621153] env[68244]: value = "task-2780368" [ 844.621153] env[68244]: _type = "Task" [ 844.621153] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.635212] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780368, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.644145] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780367, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.723444] env[68244]: DEBUG nova.compute.manager [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 844.723703] env[68244]: DEBUG nova.compute.manager [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing instance network info cache due to event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 844.723945] env[68244]: DEBUG oslo_concurrency.lockutils [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] Acquiring lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.724174] env[68244]: DEBUG oslo_concurrency.lockutils [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] Acquired lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.724357] env[68244]: DEBUG nova.network.neutron [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.112592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-faaf2d6e-e80b-4745-9d52-d52ef2a3d60d tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.554s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.138090] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780368, 'name': CreateVM_Task, 'duration_secs': 0.486493} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.144474] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.145273] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780367, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77268} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.146094] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.146250] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.147396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.147396] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2d9dbf75-992d-4932-bd5d-84462494ebe8/2d9dbf75-992d-4932-bd5d-84462494ebe8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.147396] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.147396] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57492f23-c3d6-4946-a1cb-d3963afc8ae8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.149179] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f09e929-d3a2-4b1f-b57e-d9aa94623a69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.155366] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 845.155366] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52595aa7-f858-2e50-3ed7-44dad19f7c11" [ 845.155366] env[68244]: _type = "Task" [ 845.155366] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.156660] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 845.156660] env[68244]: value = "task-2780369" [ 845.156660] env[68244]: _type = "Task" [ 845.156660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.170432] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52595aa7-f858-2e50-3ed7-44dad19f7c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.173184] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.229574] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f1f4b4-e53c-4d78-9a90-1a6fd2484f5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.240411] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10a3614-8f16-4ce4-9957-992b19fb0d03 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.282075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f627e62-5fbc-4845-a283-9cec37e6d959 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.293018] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2364bec0-c51d-47db-a07b-9a21ff11838b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.305430] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.467389] env[68244]: DEBUG nova.network.neutron [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updated VIF entry in instance network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.467790] env[68244]: DEBUG nova.network.neutron [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.612575] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Successfully updated port: bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.613959] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 845.679115] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52595aa7-f858-2e50-3ed7-44dad19f7c11, 'name': SearchDatastore_Task, 'duration_secs': 0.017837} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.679721] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071089} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.679721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.679953] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.680109] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.680251] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.680429] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.680827] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.680946] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ffda553-b63e-4c75-8d09-ddeddacf2887 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.683823] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca9d30d-3b4a-4cbc-b28b-517189134878 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.708312] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 2d9dbf75-992d-4932-bd5d-84462494ebe8/2d9dbf75-992d-4932-bd5d-84462494ebe8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.710299] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d9eaf76-c780-4d71-96f9-ac5d69073972 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.725331] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.725331] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.727809] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-525a7b87-69c2-43e7-aaa0-7b5f82ca75c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.733789] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 845.733789] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f72212-3e5c-dfb1-a4b7-35cd43f106a0" [ 845.733789] env[68244]: _type = "Task" [ 845.733789] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.738098] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 845.738098] env[68244]: value = "task-2780370" [ 845.738098] env[68244]: _type = "Task" [ 845.738098] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.744488] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f72212-3e5c-dfb1-a4b7-35cd43f106a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.749515] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780370, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.808988] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.867230] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.867577] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.970694] env[68244]: DEBUG oslo_concurrency.lockutils [req-e17b81d6-2980-41e2-ba0f-f95825b25768 req-5eeba0c3-6e3e-4021-9d74-d5ed793f56ce service nova] Releasing lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.117763] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.118077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.118144] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.140861] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.252611] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.253085] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f72212-3e5c-dfb1-a4b7-35cd43f106a0, 'name': SearchDatastore_Task, 'duration_secs': 0.037612} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.254281] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aedc658a-293a-4b83-bc95-b0f8c29c488b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.261027] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 846.261027] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522ee3ff-1b6d-4a16-54b2-bc4cb7e0e6e2" [ 846.261027] env[68244]: _type = "Task" [ 846.261027] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.269816] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522ee3ff-1b6d-4a16-54b2-bc4cb7e0e6e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.314879] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.315522] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 846.318304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.342s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.322022] env[68244]: DEBUG nova.objects.instance [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 846.371404] env[68244]: DEBUG nova.compute.utils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 846.663227] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.751162] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780370, 'name': ReconfigVM_Task, 'duration_secs': 0.63778} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.751459] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 2d9dbf75-992d-4932-bd5d-84462494ebe8/2d9dbf75-992d-4932-bd5d-84462494ebe8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.752137] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2028fab2-d577-466f-b1fb-0d987964f077 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.759484] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 846.759484] env[68244]: value = "task-2780371" [ 846.759484] env[68244]: _type = "Task" [ 846.759484] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.773504] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780371, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.777720] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522ee3ff-1b6d-4a16-54b2-bc4cb7e0e6e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011488} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.778113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.778421] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 774ce6f8-6273-4f2b-b398-ee8c44d79520/774ce6f8-6273-4f2b-b398-ee8c44d79520.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.778710] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccecbc0d-26a8-4e1d-9861-df2e6e77fa41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.785868] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 846.785868] env[68244]: value = "task-2780372" [ 846.785868] env[68244]: _type = "Task" [ 846.785868] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.801550] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.822966] env[68244]: DEBUG nova.compute.utils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 846.829505] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 846.830079] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 846.873988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.894348] env[68244]: DEBUG nova.policy [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ded03c2489d4b9a95b4f10a35eed6ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6539e5ec36f0484f85e61fa8b4ef3f9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 846.907676] env[68244]: DEBUG nova.network.neutron [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Updating instance_info_cache with network_info: [{"id": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "address": "fa:16:3e:36:9c:e1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapbb9be0d6-ef", "ovs_interfaceid": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.268177] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Successfully created port: b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.276851] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780371, 'name': Rename_Task, 'duration_secs': 0.178112} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.277316] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.277675] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57569caa-397b-42b8-a5b6-4e8f317d0551 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.288036] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 847.288036] env[68244]: value = "task-2780373" [ 847.288036] env[68244]: _type = "Task" [ 847.288036] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.307037] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780373, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.312172] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780372, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.329889] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 847.335303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bf6882e2-7052-43ba-8f5b-2b6c6f2e9b46 tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.336485] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.476s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.336740] env[68244]: DEBUG nova.objects.instance [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'resources' on Instance uuid c9f5fbeb-28b6-4b41-9156-5b90bc19977c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.411012] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.411397] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Instance network_info: |[{"id": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "address": "fa:16:3e:36:9c:e1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb9be0d6-ef", "ovs_interfaceid": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 847.411881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:9c:e1', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb9be0d6-efed-48c5-898c-b87f41e103ab', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.421239] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.422567] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.422567] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e96b0d3-9d1f-4c09-abd7-0bbedd179a3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.457151] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.457151] env[68244]: value = "task-2780374" [ 847.457151] env[68244]: _type = "Task" [ 847.457151] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.476581] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780374, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.684140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "d81bdefa-9c23-413b-9670-bbb2139084f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.684140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.684140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.684140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.684284] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.686428] env[68244]: INFO nova.compute.manager [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Terminating instance [ 847.706337] env[68244]: DEBUG nova.compute.manager [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Received event network-vif-plugged-bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 847.706652] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Acquiring lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.706934] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.707298] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.707464] env[68244]: DEBUG nova.compute.manager [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] No waiting events found dispatching network-vif-plugged-bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.707719] env[68244]: WARNING nova.compute.manager [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Received unexpected event network-vif-plugged-bb9be0d6-efed-48c5-898c-b87f41e103ab for instance with vm_state building and task_state spawning. 
[ 847.707982] env[68244]: DEBUG nova.compute.manager [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Received event network-changed-bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 847.708225] env[68244]: DEBUG nova.compute.manager [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Refreshing instance network info cache due to event network-changed-bb9be0d6-efed-48c5-898c-b87f41e103ab. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 847.708491] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Acquiring lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.708689] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Acquired lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.708907] env[68244]: DEBUG nova.network.neutron [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Refreshing network info cache for port bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.798113] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614834} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.801556] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 774ce6f8-6273-4f2b-b398-ee8c44d79520/774ce6f8-6273-4f2b-b398-ee8c44d79520.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.801866] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.802228] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-448624ea-e0cf-47f3-8955-5ecc9638f070 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.811099] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780373, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.812599] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 847.812599] env[68244]: value = "task-2780375" [ 847.812599] env[68244]: _type = "Task" [ 847.812599] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.821356] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.974024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.974024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.974776] env[68244]: INFO nova.compute.manager [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Attaching volume 9f21b969-836e-43d7-9941-918bcd8d1ee8 to /dev/sdb [ 847.977091] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780374, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.023466] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32fd317-6e70-43f6-8709-dfdeee112b75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.031226] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b6ec43-adea-428a-850f-fd2bf4a9f03a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.044546] env[68244]: DEBUG nova.virt.block_device [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updating existing volume attachment record: 71fd18d4-c671-46f6-91d0-78061c52c4a5 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 848.194294] env[68244]: DEBUG nova.compute.manager [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.194294] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.194294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d5a8be-f811-4817-ad8d-68cce0bdeaf0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.206164] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.206752] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-628b6505-5416-467c-83ae-bccc029ad044 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.219202] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 848.219202] env[68244]: value = "task-2780377" [ 848.219202] env[68244]: _type = "Task" [ 848.219202] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.228218] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2780377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.311733] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780373, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.327662] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128286} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.327662] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.328624] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469d384a-85a8-447d-bf89-30b2c3a03283 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.355272] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 848.370554] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 774ce6f8-6273-4f2b-b398-ee8c44d79520/774ce6f8-6273-4f2b-b398-ee8c44d79520.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.373850] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8ca31df-13e7-4b67-8609-05bdb3559299 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.399595] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 848.399595] env[68244]: value = "task-2780380" [ 848.399595] env[68244]: _type = "Task" [ 848.399595] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 848.410659] env[68244]: DEBUG 
nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 848.410659] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 848.410659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5c76e4-338e-4c14-8653-16969e5fe941 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.422014] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.428022] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46a0047-c4e8-4747-abdd-e8caa477ada9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.471052] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780374, 'name': CreateVM_Task, 'duration_secs': 0.80747} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.471274] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.472088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.472271] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.472593] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 848.472894] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb138dd9-a21d-4acf-a12d-cf7c97c1f9cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.478201] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 848.478201] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5296181e-1dd4-a779-8e21-58d0bedcb1d5" [ 848.478201] env[68244]: _type = "Task" [ 848.478201] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.489845] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5296181e-1dd4-a779-8e21-58d0bedcb1d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.496694] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb57e32-0716-47e5-8e04-c4679e726f07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.507159] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c052d22-05f7-4497-a45a-fe826b65f6ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.547167] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1546c80-7013-49f7-b3ca-b84f416b5cd1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.555840] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb52d4-3301-46d9-add9-dfedf34d419d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.571146] env[68244]: DEBUG nova.compute.provider_tree [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.733020] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2780377, 'name': PowerOffVM_Task, 'duration_secs': 0.343206} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.733020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 848.733020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 848.733020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ad57be9-361e-440c-b8e4-f1b2f9ad5800 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.812126] env[68244]: DEBUG oslo_vmware.api [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780373, 'name': PowerOnVM_Task, 'duration_secs': 1.174026} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.813579] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.813839] env[68244]: INFO nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Took 10.27 seconds to spawn the instance on the hypervisor. [ 848.814038] env[68244]: DEBUG nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.814347] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 848.814595] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 848.814722] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Deleting the datastore file [datastore2] d81bdefa-9c23-413b-9670-bbb2139084f7 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.815531] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f75d9d6-ae95-4d65-a940-c46bf7176683 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.818495] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82b17bc7-9d8d-4b86-b39d-85ac1142c786 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.821042] env[68244]: DEBUG nova.network.neutron [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Updated VIF entry in instance network info cache for port bb9be0d6-efed-48c5-898c-b87f41e103ab. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.821383] env[68244]: DEBUG nova.network.neutron [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Updating instance_info_cache with network_info: [{"id": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "address": "fa:16:3e:36:9c:e1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb9be0d6-ef", "ovs_interfaceid": "bb9be0d6-efed-48c5-898c-b87f41e103ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.831430] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for the task: (returnval){ [ 848.831430] env[68244]: value = "task-2780382" [ 848.831430] env[68244]: _type = "Task" [ 848.831430] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.841063] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2780382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.859199] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Successfully updated port: b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.910113] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780380, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.922569] env[68244]: DEBUG nova.compute.manager [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Received event network-vif-plugged-b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 848.922787] env[68244]: DEBUG oslo_concurrency.lockutils [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] Acquiring lock "184f7694-9cab-4184-a1c0-926763a81baf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.923021] env[68244]: DEBUG oslo_concurrency.lockutils [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] Lock "184f7694-9cab-4184-a1c0-926763a81baf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.923209] env[68244]: DEBUG oslo_concurrency.lockutils [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] Lock "184f7694-9cab-4184-a1c0-926763a81baf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.923393] env[68244]: DEBUG nova.compute.manager [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] No waiting events found dispatching network-vif-plugged-b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.923571] env[68244]: WARNING nova.compute.manager [req-86cdc848-982d-4873-b42e-d51869aa101e req-40ddf63d-1c0f-4ede-93ce-0ce01d02d518 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Received unexpected event network-vif-plugged-b9e56a35-cb81-41d0-81d4-951205f52fa0 for instance with vm_state building and task_state spawning. [ 848.993226] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5296181e-1dd4-a779-8e21-58d0bedcb1d5, 'name': SearchDatastore_Task, 'duration_secs': 0.012038} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.993592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.993855] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.994101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.994283] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.996470] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.996470] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f7f017e-4bad-4fee-8ad4-40c690a82141 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.005085] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.005085] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.005085] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-000c9489-0c0c-4db0-998e-e404dda77428 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.010607] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 849.010607] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8294e-8309-6267-99ef-2ac9dd8156f6" [ 849.010607] env[68244]: _type = "Task" [ 849.010607] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.018792] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8294e-8309-6267-99ef-2ac9dd8156f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.075036] env[68244]: DEBUG nova.scheduler.client.report [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.325353] env[68244]: DEBUG oslo_concurrency.lockutils [req-fe726733-bcee-4c38-8890-7df1c48c993a req-7dfe9727-fc40-45c9-9a22-c5d555d74eb0 service nova] Releasing lock "refresh_cache-8c00240d-5124-4ada-bd4d-4acd39a345c8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.348255] env[68244]: INFO nova.compute.manager [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Took 55.72 seconds to build instance. [ 849.353370] env[68244]: DEBUG oslo_vmware.api [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Task: {'id': task-2780382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204222} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.353804] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.353988] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.354196] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.354720] env[68244]: INFO nova.compute.manager [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 849.354720] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.354874] env[68244]: DEBUG nova.compute.manager [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.354874] env[68244]: DEBUG nova.network.neutron [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.361884] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.361884] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.361884] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.411745] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780380, 'name': ReconfigVM_Task, 'duration_secs': 0.5215} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.412150] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 774ce6f8-6273-4f2b-b398-ee8c44d79520/774ce6f8-6273-4f2b-b398-ee8c44d79520.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.412805] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6d1b5b7-f042-4e3c-a9ff-57ba05b3b94a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.422359] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 849.422359] env[68244]: value = "task-2780383" [ 849.422359] env[68244]: _type = "Task" [ 849.422359] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.431821] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780383, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.521958] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e8294e-8309-6267-99ef-2ac9dd8156f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010922} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.522818] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836f3ad4-5d75-457f-af58-9415a9c5d4a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.534008] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 849.534008] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f423f0-c647-b25b-b8cf-69b7be427ec2" [ 849.534008] env[68244]: _type = "Task" [ 849.534008] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.543927] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f423f0-c647-b25b-b8cf-69b7be427ec2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.584095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.586684] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 849.586684] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.020s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.586684] env[68244]: DEBUG nova.objects.instance [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lazy-loading 'resources' on Instance uuid 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.587981] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137aeb30-6306-4b9b-b273-cc91e16c81ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.595269] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 849.595269] env[68244]: ERROR oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk due to incomplete transfer. [ 849.595713] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f7f756a8-9213-48d2-b53d-74e2e5e399d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.605008] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e1d140-dd8a-f46a-0f02-6b2bbde44d1f/disk-0.vmdk. 
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 849.605449] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Uploaded image e7140df0-f9b6-40bd-8576-e37095b62021 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 849.607687] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 849.608230] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-129b10cf-2b7a-4c7a-8d5f-af0850209532 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.614801] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 849.614801] env[68244]: value = "task-2780384" [ 849.614801] env[68244]: _type = "Task" [ 849.614801] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.618744] env[68244]: INFO nova.scheduler.client.report [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocations for instance c9f5fbeb-28b6-4b41-9156-5b90bc19977c [ 849.627541] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780384, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.854943] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f69440ff-3519-425d-af32-1b9bd780a2de tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.368s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.892534] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.940210] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780383, 'name': Rename_Task, 'duration_secs': 0.318261} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.940355] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.940597] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbeb5317-bb93-4a46-9c31-0e002f5c04df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.947249] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 849.947249] env[68244]: value = "task-2780385" [ 849.947249] env[68244]: _type = "Task" [ 849.947249] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.957609] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780385, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.048959] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f423f0-c647-b25b-b8cf-69b7be427ec2, 'name': SearchDatastore_Task, 'duration_secs': 0.010212} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.048959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.048959] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8c00240d-5124-4ada-bd4d-4acd39a345c8/8c00240d-5124-4ada-bd4d-4acd39a345c8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.048959] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f9c9415-4e9c-4c26-b0dc-34c536438c08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.058360] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 850.058360] env[68244]: value = "task-2780386" [ 850.058360] env[68244]: _type = "Task" [ 850.058360] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.071456] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780386, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.116246] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Updating instance_info_cache with network_info: [{"id": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "address": "fa:16:3e:ef:78:34", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e56a35-cb", "ovs_interfaceid": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.131342] env[68244]: DEBUG oslo_concurrency.lockutils [None req-133dd89d-0f58-4aa1-969c-494533b86e4b tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "c9f5fbeb-28b6-4b41-9156-5b90bc19977c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.071s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.135840] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780384, 'name': Destroy_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.217732] env[68244]: DEBUG oslo_concurrency.lockutils [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.218346] env[68244]: DEBUG oslo_concurrency.lockutils [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.218346] env[68244]: DEBUG nova.compute.manager [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.220115] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51701ac-390f-402d-80ba-9da89e8c6021 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.227591] env[68244]: DEBUG nova.compute.manager [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 850.228323] env[68244]: DEBUG nova.objects.instance [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lazy-loading 'flavor' on Instance uuid b0b79f25-f97d-4d59-ae80-2f8c09201073 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.358138] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 850.453408] env[68244]: DEBUG nova.network.neutron [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.461836] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780385, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.570513] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780386, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468704} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.573518] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8c00240d-5124-4ada-bd4d-4acd39a345c8/8c00240d-5124-4ada-bd4d-4acd39a345c8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.573832] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.574387] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7bb9b07-ce92-4495-86c8-e8992219b29e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.581918] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 850.581918] env[68244]: value = "task-2780388" [ 850.581918] env[68244]: _type = "Task" [ 850.581918] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.595046] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780388, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.629862] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.630262] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Instance network_info: |[{"id": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "address": "fa:16:3e:ef:78:34", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e56a35-cb", "ovs_interfaceid": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 850.630560] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780384, 'name': Destroy_Task, 'duration_secs': 0.777504} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.630944] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:78:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9e56a35-cb81-41d0-81d4-951205f52fa0', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.640280] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 850.640280] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Destroyed the VM [ 850.640280] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 850.643954] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.643954] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-368909c7-0b03-4d72-b16b-75215d2fa09b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.644233] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ccc10c0-5e0b-4149-884f-f16c68d307ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.668966] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 850.668966] env[68244]: value = "task-2780389" [ 850.668966] env[68244]: _type = "Task" [ 850.668966] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.675379] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.675379] env[68244]: value = "task-2780390" [ 850.675379] env[68244]: _type = "Task" [ 850.675379] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.682559] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780389, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.691052] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780390, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.775387] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd602c50-a553-43a8-a4c8-a8f40437883c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.788958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917fa254-7177-4420-84dc-21747c9ce61a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.833605] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584658eb-4d4c-47ab-9b73-e9b458d34ead {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.846020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc04383-92ca-40bb-92df-68e39229333a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.864515] env[68244]: DEBUG nova.compute.provider_tree [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.882317] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.958043] env[68244]: DEBUG oslo_vmware.api [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780385, 'name': PowerOnVM_Task, 'duration_secs': 0.779241} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.958719] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.958719] env[68244]: INFO nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Took 9.17 seconds to spawn the instance on the hypervisor. 
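[editor's note] The surrounding entries repeat one pattern: an asynchronous vSphere "*_Task" method is invoked through oslo_vmware.service, and the returned task is then polled by oslo_vmware.api (the "Waiting for the task" / "_poll_task ... progress is N%" / "completed successfully" lines, e.g. PowerOnVM_Task above). A minimal illustrative sketch of that invoke-then-wait pattern is given below; it is not code from this deployment, and "session" and "vm_ref" are placeholders assumed to come from an already established VMwareAPISession and a prior PropertyCollector lookup.

    # Illustrative sketch only; names are placeholders, not values taken from this log.
    from oslo_vmware import api


    def power_on(session: api.VMwareAPISession, vm_ref):
        """Start a VM and block until vCenter reports the task done.

        Mirrors the "Invoking VirtualMachine.PowerOnVM_Task" entry followed
        by the wait_for_task/_poll_task progress lines recorded above.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

[end editor's note]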
[ 850.958866] env[68244]: DEBUG nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.960419] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d83d84b-1495-421e-8f84-fc29dc114c0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.962543] env[68244]: INFO nova.compute.manager [-] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Took 1.61 seconds to deallocate network for instance. [ 851.092839] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780388, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071312} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.093171] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.094049] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47404e33-deec-44a8-8f0e-35d43d625199 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.131748] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 8c00240d-5124-4ada-bd4d-4acd39a345c8/8c00240d-5124-4ada-bd4d-4acd39a345c8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.131748] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffcea07f-6c84-4e07-bf90-cfed3d8bb283 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.155943] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 851.155943] env[68244]: value = "task-2780391" [ 851.155943] env[68244]: _type = "Task" [ 851.155943] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.165404] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780391, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.185480] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780389, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.189772] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780390, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.236213] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.236594] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d350c3f-e0c7-46f8-844e-c5fca68d35a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.245668] env[68244]: DEBUG oslo_vmware.api [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 851.245668] env[68244]: value = "task-2780392" [ 851.245668] env[68244]: _type = "Task" [ 851.245668] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.254189] env[68244]: DEBUG oslo_vmware.api [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780392, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.288145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.288518] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.288778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.288974] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.289178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.291601] env[68244]: INFO nova.compute.manager [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Terminating instance [ 851.367757] env[68244]: DEBUG nova.scheduler.client.report [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.378594] env[68244]: DEBUG nova.compute.manager [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Received event 
network-changed-b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 851.379154] env[68244]: DEBUG nova.compute.manager [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Refreshing instance network info cache due to event network-changed-b9e56a35-cb81-41d0-81d4-951205f52fa0. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 851.379279] env[68244]: DEBUG oslo_concurrency.lockutils [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] Acquiring lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.379376] env[68244]: DEBUG oslo_concurrency.lockutils [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] Acquired lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.379685] env[68244]: DEBUG nova.network.neutron [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Refreshing network info cache for port b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.481022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.481816] env[68244]: INFO nova.compute.manager [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Took 54.78 seconds to build instance. [ 851.673170] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780391, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.689168] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780389, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.694503] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780390, 'name': CreateVM_Task, 'duration_secs': 0.599466} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.694698] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 851.695876] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.695876] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.696304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 851.696658] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfc1a233-bf32-4aa6-824d-ec625216932d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.702550] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 851.702550] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ff8a7-1989-4d73-16dd-69560d2a0174" [ 851.702550] env[68244]: _type = "Task" [ 851.702550] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.712310] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ff8a7-1989-4d73-16dd-69560d2a0174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.755700] env[68244]: DEBUG oslo_vmware.api [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780392, 'name': PowerOffVM_Task, 'duration_secs': 0.181537} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.756066] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.756288] env[68244]: DEBUG nova.compute.manager [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.757150] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7f4056-9b29-4db3-80f3-97f95dd79c28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.796064] env[68244]: DEBUG nova.compute.manager [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 851.796255] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 851.797453] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e8091b-fc6c-434f-9872-2c426c26f763 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.806132] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.806390] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36db9ec7-cfb8-4732-9b07-3337f58f4475 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.876059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.290s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.878934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.675s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.879221] env[68244]: DEBUG 
nova.objects.instance [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lazy-loading 'resources' on Instance uuid f113bb6c-f05a-4253-98af-ca827fcbb723 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.896044] env[68244]: INFO nova.scheduler.client.report [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Deleted allocations for instance 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f [ 851.984026] env[68244]: DEBUG oslo_concurrency.lockutils [None req-331058a5-56d9-49c0-a387-0cf5601515b0 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.362s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.000225] env[68244]: DEBUG nova.compute.manager [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.002824] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832a4cc1-0e4c-408b-94e7-7b7b6289c558 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.133532] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.133878] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.134222] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore2] 100ec1f9-6776-4832-a4c2-e9a4def0d350 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.134646] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35d0b451-dcbe-4dc5-898a-b88c417bd40c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.143817] env[68244]: DEBUG oslo_vmware.api [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 852.143817] env[68244]: value = "task-2780394" [ 852.143817] env[68244]: _type = "Task" [ 852.143817] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.158536] env[68244]: DEBUG oslo_vmware.api [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.173092] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780391, 'name': ReconfigVM_Task, 'duration_secs': 0.8261} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.176172] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 8c00240d-5124-4ada-bd4d-4acd39a345c8/8c00240d-5124-4ada-bd4d-4acd39a345c8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.176897] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c96eca3-a943-4b3b-8bb5-e075defe07fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.184469] env[68244]: DEBUG oslo_vmware.api [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780389, 'name': RemoveSnapshot_Task, 'duration_secs': 1.212891} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.185767] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 852.186955] env[68244]: INFO nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Took 18.71 seconds to snapshot the instance on the hypervisor. [ 852.188659] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 852.188659] env[68244]: value = "task-2780395" [ 852.188659] env[68244]: _type = "Task" [ 852.188659] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.198101] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780395, 'name': Rename_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.215713] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ff8a7-1989-4d73-16dd-69560d2a0174, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.215713] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.215713] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.215972] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.216137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.216357] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.216649] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b651e0d6-a5c1-4b10-8178-c60c490bcb6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.228434] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.228434] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 852.228893] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2d34121-8c3b-4b90-9e50-fa172d3a08dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.235308] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 852.235308] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bf8062-ac03-b776-30d4-6ad3a40b5611" [ 852.235308] env[68244]: _type = "Task" [ 852.235308] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.243644] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.243843] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.253057] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bf8062-ac03-b776-30d4-6ad3a40b5611, 'name': SearchDatastore_Task, 'duration_secs': 0.012634} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.254095] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35f43c00-c72a-4b56-853c-86de3d1ee780 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.259830] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 852.259830] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52295568-ec27-bcd8-b237-5110a24b8103" [ 852.259830] env[68244]: _type = "Task" [ 852.259830] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.273034] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52295568-ec27-bcd8-b237-5110a24b8103, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.273710] env[68244]: DEBUG oslo_concurrency.lockutils [None req-429a1a3b-5999-493a-9588-1e4d6fb25cb1 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.303763] env[68244]: DEBUG nova.network.neutron [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Updated VIF entry in instance network info cache for port b9e56a35-cb81-41d0-81d4-951205f52fa0. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.304147] env[68244]: DEBUG nova.network.neutron [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Updating instance_info_cache with network_info: [{"id": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "address": "fa:16:3e:ef:78:34", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9e56a35-cb", "ovs_interfaceid": "b9e56a35-cb81-41d0-81d4-951205f52fa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.406437] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ad9f3674-5994-48cd-a2d4-fe490626cf0a tempest-ServersAdminNegativeTestJSON-2045109121 tempest-ServersAdminNegativeTestJSON-2045109121-project-member] Lock "511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.403s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.486800] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 852.514531] env[68244]: INFO nova.compute.manager [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] instance snapshotting [ 852.520551] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8b240a-6254-4ea8-bff7-8819e9be1ea0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.545999] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d499176-cd22-4ec5-b778-5e1d37984729 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.602356] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 852.606044] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559013', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'name': 'volume-9f21b969-836e-43d7-9941-918bcd8d1ee8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f', 'attached_at': '', 'detached_at': '', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'serial': '9f21b969-836e-43d7-9941-918bcd8d1ee8'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 852.606044] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b05063-bba5-42de-8f4a-ff4106807a7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.627069] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eae9658-05ff-48bf-8378-6a9f42dfbf92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.652123] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] volume-9f21b969-836e-43d7-9941-918bcd8d1ee8/volume-9f21b969-836e-43d7-9941-918bcd8d1ee8.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.657901] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f4a5810-9659-41f2-8352-c1409779158c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.676238] 
env[68244]: DEBUG oslo_vmware.api [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408432} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.677727] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.678202] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.678746] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.679197] env[68244]: INFO nova.compute.manager [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Took 0.88 seconds to destroy the instance on the hypervisor. [ 852.679588] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.680722] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 852.680722] env[68244]: value = "task-2780396" [ 852.680722] env[68244]: _type = "Task" [ 852.680722] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.682887] env[68244]: DEBUG nova.compute.manager [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 852.683051] env[68244]: DEBUG nova.network.neutron [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 852.695843] env[68244]: DEBUG nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance disappeared during snapshot {{(pid=68244) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 852.697797] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.705896] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780395, 'name': Rename_Task, 'duration_secs': 0.252669} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.706221] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.706522] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b897b8b1-346b-4d38-8404-0219a15b2878 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.713054] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 852.713054] env[68244]: value = "task-2780397" [ 852.713054] env[68244]: _type = "Task" [ 852.713054] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.716865] env[68244]: DEBUG nova.compute.manager [None req-4650c05b-d9ac-429b-bdd0-3c0cbe62c197 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image not found during clean up e7140df0-f9b6-40bd-8576-e37095b62021 {{(pid=68244) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 852.725500] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780397, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.771039] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52295568-ec27-bcd8-b237-5110a24b8103, 'name': SearchDatastore_Task, 'duration_secs': 0.01134} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.774817] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.775123] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 184f7694-9cab-4184-a1c0-926763a81baf/184f7694-9cab-4184-a1c0-926763a81baf.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 852.775607] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ceb674e-fd3f-4f0c-b5cf-74d4def2ea3e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.783366] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 852.783366] env[68244]: value = "task-2780398" [ 852.783366] env[68244]: _type = "Task" [ 852.783366] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.792116] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780398, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.807435] env[68244]: DEBUG oslo_concurrency.lockutils [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] Releasing lock "refresh_cache-184f7694-9cab-4184-a1c0-926763a81baf" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.807734] env[68244]: DEBUG nova.compute.manager [req-72a79ce0-6957-4060-8c19-199ac8b7fe32 req-fb4f864c-b554-4ed8-94eb-32d86b9ae2b4 service nova] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Received event network-vif-deleted-e39c0e00-ee59-4d80-b276-18ca3d5cb12f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 852.998376] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9c9fc9-4914-4bb3-a021-a29f0642923e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.006687] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d6386c-d862-48dd-a97a-7ee29f3515d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.011017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.047962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7435c45-b70c-4453-9dec-489162f05fd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.056857] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af680952-6bed-49ff-a236-3545663e744c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.062709] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 853.063054] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f2c9b7bc-8bcc-4ef8-821d-a409ec94b955 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.076787] env[68244]: DEBUG nova.compute.provider_tree [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.080212] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] 
Waiting for the task: (returnval){ [ 853.080212] env[68244]: value = "task-2780399" [ 853.080212] env[68244]: _type = "Task" [ 853.080212] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.089826] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780399, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.197411] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.225305] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780397, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.298052] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780398, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.506908] env[68244]: DEBUG nova.compute.manager [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 853.506908] env[68244]: DEBUG nova.compute.manager [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing instance network info cache due to event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 853.506908] env[68244]: DEBUG oslo_concurrency.lockutils [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] Acquiring lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.507247] env[68244]: DEBUG oslo_concurrency.lockutils [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] Acquired lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.509483] env[68244]: DEBUG nova.network.neutron [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.582571] env[68244]: DEBUG nova.scheduler.client.report [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.597954] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780399, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.698146] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780396, 'name': ReconfigVM_Task, 'duration_secs': 0.810236} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.698470] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfigured VM instance instance-0000000a to attach disk [datastore2] volume-9f21b969-836e-43d7-9941-918bcd8d1ee8/volume-9f21b969-836e-43d7-9941-918bcd8d1ee8.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.704642] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa2d323b-b69b-492f-8124-50201e87c51a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.725022] env[68244]: DEBUG oslo_vmware.api [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780397, 'name': PowerOnVM_Task, 'duration_secs': 0.782023} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.726401] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.726709] env[68244]: INFO nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Took 9.21 seconds to spawn the instance on the hypervisor. [ 853.727088] env[68244]: DEBUG nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.727536] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 853.727536] env[68244]: value = "task-2780400" [ 853.727536] env[68244]: _type = "Task" [ 853.727536] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.728281] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f0d0f5-9455-4ad5-8c8b-389eb32ce40b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.742524] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780400, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.778610] env[68244]: DEBUG nova.network.neutron [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.798571] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57743} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.798571] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 184f7694-9cab-4184-a1c0-926763a81baf/184f7694-9cab-4184-a1c0-926763a81baf.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 853.798571] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 853.798571] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb9e3d4f-0f7e-4ce3-b351-c855e9440788 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.805138] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 853.805138] env[68244]: value = "task-2780401" [ 853.805138] env[68244]: _type = "Task" [ 853.805138] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.813717] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780401, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.092249] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.098620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.944s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.098884] env[68244]: DEBUG nova.objects.instance [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lazy-loading 'resources' on Instance uuid f5724973-2349-481c-b2ba-d1287f09c1db {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.105858] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780399, 'name': CreateSnapshot_Task, 'duration_secs': 0.885695} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.106113] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 854.107485] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745e66b1-da96-435f-a685-eba3878fe02c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.132555] env[68244]: INFO nova.scheduler.client.report [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Deleted allocations for instance f113bb6c-f05a-4253-98af-ca827fcbb723 [ 854.243533] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780400, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.251483] env[68244]: INFO nova.compute.manager [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Took 57.13 seconds to build instance. 
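The records around this point all follow the same oslo.vmware task-polling pattern: a vCenter task method (DeleteDatastoreFile_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, CreateSnapshot_Task, CloneVM_Task) returns a task reference such as "task-2780394", and wait_for_task/_poll_task re-read its state until it completes, emitting the "progress is N%" and "completed successfully" lines seen above. The following is a minimal, hedged sketch of that loop, not oslo.vmware's actual implementation; get_task_info is a hypothetical stub standing in for the vCenter property read.

```python
import time

# Hypothetical stub for the vCenter task-info read that oslo.vmware performs;
# here it simply pretends the task finishes on the third poll.
def get_task_info(task_ref, _state={"calls": 0}):
    _state["calls"] += 1
    if _state["calls"] < 3:
        return {"state": "running", "progress": _state["calls"] * 40}
    return {"state": "success", "progress": 100}

def wait_for_task(task_ref, interval=0.5, timeout=60.0):
    """Poll a task reference until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task {task_ref} failed")
        # Corresponds to the "Task: {...} progress is N%" lines between polls.
        print(f"Task {task_ref} progress is {info['progress']}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")

if __name__ == "__main__":
    wait_for_task("task-2780394")
```

In the log, the same loop explains why a single operation produces several progress records before the final "completed successfully" entry with its duration_secs value.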
[ 854.265208] env[68244]: DEBUG nova.objects.instance [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lazy-loading 'flavor' on Instance uuid b0b79f25-f97d-4d59-ae80-2f8c09201073 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.280363] env[68244]: INFO nova.compute.manager [-] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Took 1.60 seconds to deallocate network for instance. [ 854.321279] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126094} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.321589] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 854.322410] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a50ca7-0e25-45bb-ad10-55400d7ec7ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.327655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.327957] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.328096] env[68244]: INFO nova.compute.manager [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Rebooting instance [ 854.350169] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 184f7694-9cab-4184-a1c0-926763a81baf/184f7694-9cab-4184-a1c0-926763a81baf.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.350978] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0066aa1f-9ef3-4800-8cbf-c7b7bcd2fc03 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.371702] env[68244]: DEBUG 
oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 854.371702] env[68244]: value = "task-2780402" [ 854.371702] env[68244]: _type = "Task" [ 854.371702] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.375184] env[68244]: DEBUG nova.network.neutron [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updated VIF entry in instance network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.375517] env[68244]: DEBUG nova.network.neutron [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.381618] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780402, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.624985] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 854.628707] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-40c1afe3-925e-47e8-82b3-24eda8d840df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.636966] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 854.636966] env[68244]: value = "task-2780403" [ 854.636966] env[68244]: _type = "Task" [ 854.636966] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.649157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8860b29-07c5-46f8-8f5c-62718cac57df tempest-ServersListShow296Test-2031851567 tempest-ServersListShow296Test-2031851567-project-member] Lock "f113bb6c-f05a-4253-98af-ca827fcbb723" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.705s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.650684] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780403, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.741240] env[68244]: DEBUG oslo_vmware.api [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780400, 'name': ReconfigVM_Task, 'duration_secs': 0.83163} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.743931] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559013', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'name': 'volume-9f21b969-836e-43d7-9941-918bcd8d1ee8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f', 'attached_at': '', 'detached_at': '', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'serial': '9f21b969-836e-43d7-9941-918bcd8d1ee8'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 854.753244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-033800d3-2514-407b-8da1-0665e2fd9570 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.026s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.775331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.775331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquired lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.775610] env[68244]: DEBUG nova.network.neutron [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.775757] env[68244]: DEBUG nova.objects.instance [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lazy-loading 'info_cache' on Instance uuid b0b79f25-f97d-4d59-ae80-2f8c09201073 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.788386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.854668] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 
tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.882778] env[68244]: DEBUG oslo_concurrency.lockutils [req-9481694d-c703-4e36-8f9c-45f84c88d7c1 req-cce6252f-ca00-416d-820c-0b18abd5fb6b service nova] Releasing lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.883179] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquired lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.884029] env[68244]: DEBUG nova.network.neutron [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.891860] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780402, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.135029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c13ab9c-472f-4c4e-9bd6-966c7aaa59cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.149317] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780403, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.152498] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f0bb30-abde-4f10-9d46-85815b8dadf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.188755] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689ba5e1-34ad-49c6-b86e-a3897e8ef668 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.198022] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab27cff-30dc-4d2f-9012-92c53124f3fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.211207] env[68244]: DEBUG nova.compute.provider_tree [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.257043] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 855.283331] env[68244]: DEBUG nova.objects.base [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 855.386302] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780402, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.514934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.515309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.516080] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.517041] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.517329] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.522993] env[68244]: INFO nova.compute.manager [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Terminating instance [ 855.651706] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780403, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.666534] env[68244]: DEBUG nova.network.neutron [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.686218] env[68244]: DEBUG nova.compute.manager [req-89b91f54-9f71-4ec9-9308-10f9195ef6a1 req-45bbcfc4-3b73-4bde-a46f-6ce578d015cb service nova] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Received event network-vif-deleted-23889c4c-9b8b-4405-b957-90fda02d4ef5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 855.716115] env[68244]: DEBUG nova.scheduler.client.report [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.783864] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.797884] env[68244]: DEBUG nova.objects.instance [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lazy-loading 'flavor' on Instance uuid f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f {{(pid=68244) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 855.889381] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780402, 'name': ReconfigVM_Task, 'duration_secs': 1.352243} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.889663] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 184f7694-9cab-4184-a1c0-926763a81baf/184f7694-9cab-4184-a1c0-926763a81baf.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.890388] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c95bd252-abff-4070-9eed-5cab32cf0312 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.902191] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 855.902191] env[68244]: value = "task-2780404" [ 855.902191] env[68244]: _type = "Task" [ 855.902191] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.911923] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780404, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.029147] env[68244]: DEBUG nova.compute.manager [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 856.029147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 856.030061] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00c598f-0b0b-429b-b3e6-2715e0c159b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.038687] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 856.039022] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a20b17f4-bae1-454c-bf11-7e38440b8797 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.044267] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 856.044267] env[68244]: value = "task-2780405" [ 856.044267] env[68244]: _type = "Task" [ 856.044267] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.056762] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.111475] env[68244]: DEBUG nova.network.neutron [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updating instance_info_cache with network_info: [{"id": "c231c79b-11e8-4987-8977-587e745b5cbe", "address": "fa:16:3e:33:f0:a3", "network": {"id": "9a1021eb-62ea-4f24-ba1a-2c6b6c4ec251", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-513539692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd31896823df441cb451756c990f51f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc231c79b-11", "ovs_interfaceid": "c231c79b-11e8-4987-8977-587e745b5cbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.151244] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780403, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.169352] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Releasing lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.222526] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.124s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.225053] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.251s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.228725] env[68244]: INFO nova.compute.claims [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.254034] env[68244]: INFO nova.scheduler.client.report [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleted allocations for instance f5724973-2349-481c-b2ba-d1287f09c1db [ 856.303512] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fced3a46-c975-4de0-a2ca-8254cd97f96f tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.330s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.416000] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780404, 'name': Rename_Task, 'duration_secs': 0.137653} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.416298] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 856.418584] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8f06388-3685-4fff-9667-7cfefb77aabf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.423417] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 856.423417] env[68244]: value = "task-2780406" [ 856.423417] env[68244]: _type = "Task" [ 856.423417] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.437033] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.555760] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780405, 'name': PowerOffVM_Task, 'duration_secs': 0.227484} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.556205] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.556530] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.556901] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c00fae57-0cf2-48bf-ab23-3a5a66f43d6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.614639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Releasing lock "refresh_cache-b0b79f25-f97d-4d59-ae80-2f8c09201073" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.624605] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.630317] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.630317] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 8c00240d-5124-4ada-bd4d-4acd39a345c8 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.630317] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be962fbd-059a-43ce-aa41-6f2aa451563e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.637930] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 856.637930] env[68244]: value = "task-2780408" [ 856.637930] env[68244]: _type = "Task" [ 856.637930] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.650742] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780408, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.654484] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780403, 'name': CloneVM_Task, 'duration_secs': 1.795335} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.655238] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Created linked-clone VM from snapshot [ 856.656115] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14d6d29-bc71-4a8a-85b5-25ad60435d7c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.667149] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Uploading image 17fb1f0f-4b70-4dd4-8b94-3df3ff0bc7bf {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 856.676525] env[68244]: DEBUG nova.compute.manager [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 856.677738] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b46126a-fcba-46af-bdee-4eafe2983e59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.684030] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 856.684030] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-40aad1b8-a2d1-467e-a09b-61586f14ef2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.697992] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 856.697992] env[68244]: value = "task-2780409" [ 856.697992] env[68244]: _type = "Task" [ 856.697992] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.708026] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780409, 'name': Destroy_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.766701] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9bc20a18-380d-42ec-ab74-451a0f75f8d8 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "f5724973-2349-481c-b2ba-d1287f09c1db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.515s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.935085] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780406, 'name': PowerOnVM_Task, 'duration_secs': 0.487912} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.935231] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 856.935478] env[68244]: INFO nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Took 8.58 seconds to spawn the instance on the hypervisor. [ 856.935655] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 856.936451] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334d8ea5-cb91-4444-a250-2a62f99650de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.939213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.939439] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.148348] env[68244]: DEBUG oslo_vmware.api [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145545} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.148610] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.148817] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 857.149011] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 857.149880] env[68244]: INFO nova.compute.manager [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 857.149880] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 857.149880] env[68244]: DEBUG nova.compute.manager [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 857.149880] env[68244]: DEBUG nova.network.neutron [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 857.208367] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780409, 'name': Destroy_Task, 'duration_secs': 0.505284} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.208731] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Destroyed the VM [ 857.208965] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 857.209330] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9aa8266c-d279-47bc-b1c9-1b8edbe23608 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.216515] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 857.216515] env[68244]: value = "task-2780410" [ 857.216515] env[68244]: _type = "Task" [ 857.216515] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.228216] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780410, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.444604] env[68244]: INFO nova.compute.manager [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Detaching volume 9f21b969-836e-43d7-9941-918bcd8d1ee8 [ 857.461253] env[68244]: INFO nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Took 52.54 seconds to build instance. [ 857.501733] env[68244]: INFO nova.virt.block_device [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Attempting to driver detach volume 9f21b969-836e-43d7-9941-918bcd8d1ee8 from mountpoint /dev/sdb [ 857.501989] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 857.502254] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559013', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'name': 'volume-9f21b969-836e-43d7-9941-918bcd8d1ee8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f', 'attached_at': '', 'detached_at': '', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'serial': '9f21b969-836e-43d7-9941-918bcd8d1ee8'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 857.503463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff4a196-e901-46d9-8e06-8c813bb278d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.530829] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e56a7aa-b4a6-4384-882d-5df532ecf20a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.541182] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db6100a-7d10-46f2-b773-0b7523007c33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.573742] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96362ae5-d425-44f3-8fc8-7088326fd8ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.591965] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] The volume has not been displaced from its original location: [datastore2] volume-9f21b969-836e-43d7-9941-918bcd8d1ee8/volume-9f21b969-836e-43d7-9941-918bcd8d1ee8.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 857.597326] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 857.600603] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-244e8d1e-022a-4ed4-9acb-446b2694b5c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.624489] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.624489] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 857.624489] env[68244]: value = "task-2780411" [ 857.624489] env[68244]: _type = "Task" [ 857.624489] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.624489] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6cb742a-46d9-4527-b0a0-575a415bfa9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.640501] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780411, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.640805] env[68244]: DEBUG oslo_vmware.api [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 857.640805] env[68244]: value = "task-2780412" [ 857.640805] env[68244]: _type = "Task" [ 857.640805] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.702204] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2bde3d-1d0c-444a-aa1c-3ab91b4e547a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.712403] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Doing hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 857.712988] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-af0b133d-e6aa-4ddd-a06e-9e8b40331170 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.728880] env[68244]: DEBUG oslo_vmware.api [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 857.728880] env[68244]: value = "task-2780413" [ 857.728880] env[68244]: _type = "Task" [ 857.728880] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.736701] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780410, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.742982] env[68244]: DEBUG oslo_vmware.api [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780413, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.872113] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a29682e-f26f-476b-90c9-b13b5eda2c4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.881221] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2c276f-a8ff-48bd-a7ae-4afafcb4cb85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.917288] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e3e2ee-fbac-4007-9b20-06e2b520723e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.926379] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27711b8f-0a31-40f6-aaba-974974f7e792 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.941623] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 857.966508] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "184f7694-9cab-4184-a1c0-926763a81baf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.128s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.052964] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.054098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.057950] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 
tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.057950] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.057950] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.059369] env[68244]: INFO nova.compute.manager [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Terminating instance [ 858.139349] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780411, 'name': ReconfigVM_Task, 'duration_secs': 0.308519} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.140733] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 858.146450] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63de69b6-d44c-4fbd-917f-41e07b87ffe4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.168554] env[68244]: DEBUG oslo_vmware.api [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780412, 'name': PowerOnVM_Task, 'duration_secs': 0.477265} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.169878] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.170124] env[68244]: DEBUG nova.compute.manager [None req-900d4066-5467-4cd5-9a1d-3fc957979b8e tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.170436] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 858.170436] env[68244]: value = "task-2780414" [ 858.170436] env[68244]: _type = "Task" [ 858.170436] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.171167] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5011aed-38c3-4470-8d61-ca2882d547c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.182416] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780414, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.222268] env[68244]: DEBUG nova.compute.manager [req-258a9907-845a-4591-8e52-fb196c3c73b2 req-33a9e4f5-fb08-4c61-9f46-b8f53906198c service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Received event network-vif-deleted-bb9be0d6-efed-48c5-898c-b87f41e103ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 858.222268] env[68244]: INFO nova.compute.manager [req-258a9907-845a-4591-8e52-fb196c3c73b2 req-33a9e4f5-fb08-4c61-9f46-b8f53906198c service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Neutron deleted interface bb9be0d6-efed-48c5-898c-b87f41e103ab; detaching it from the instance and deleting it from the info cache [ 858.222577] env[68244]: DEBUG nova.network.neutron [req-258a9907-845a-4591-8e52-fb196c3c73b2 req-33a9e4f5-fb08-4c61-9f46-b8f53906198c service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.242023] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780410, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.248825] env[68244]: DEBUG oslo_vmware.api [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780413, 'name': ResetVM_Task, 'duration_secs': 0.099887} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.249379] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Did hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 858.249597] env[68244]: DEBUG nova.compute.manager [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.250621] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ae38c7-2337-47dc-bb78-5c04ab041b3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.275082] env[68244]: DEBUG nova.network.neutron [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.464427] env[68244]: ERROR nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [req-40fcde26-e377-48cd-9e50-685644f49d7c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-40fcde26-e377-48cd-9e50-685644f49d7c"}]} [ 858.471052] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 858.482155] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 858.501008] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 858.501248] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 858.518260] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 858.540462] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 858.566185] env[68244]: DEBUG nova.compute.manager [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.566425] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.567335] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eede3c-c24b-4648-b97e-c0e182f7fecb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.586779] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.587244] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2877df0-b361-4f3f-9f6e-415d8db5d559 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.598718] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 858.598718] env[68244]: value = "task-2780415" [ 858.598718] env[68244]: _type = "Task" [ 858.598718] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.608643] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.693749] env[68244]: DEBUG oslo_vmware.api [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780414, 'name': ReconfigVM_Task, 'duration_secs': 0.175193} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.695054] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559013', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'name': 'volume-9f21b969-836e-43d7-9941-918bcd8d1ee8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f', 'attached_at': '', 'detached_at': '', 'volume_id': '9f21b969-836e-43d7-9941-918bcd8d1ee8', 'serial': '9f21b969-836e-43d7-9941-918bcd8d1ee8'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 858.729703] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3187439c-d8b1-415c-a9b2-09906681a8eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.735733] env[68244]: DEBUG oslo_vmware.api [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780410, 'name': RemoveSnapshot_Task, 'duration_secs': 1.465278} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.740668] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 858.750624] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf7336c-886f-495e-900d-2c0149badfc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.766689] env[68244]: DEBUG oslo_concurrency.lockutils [None req-79d3df3a-663e-4274-8b96-278d963d24d9 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.439s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.778776] env[68244]: INFO nova.compute.manager [-] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Took 1.63 seconds to deallocate network for instance. [ 858.795169] env[68244]: DEBUG nova.compute.manager [req-258a9907-845a-4591-8e52-fb196c3c73b2 req-33a9e4f5-fb08-4c61-9f46-b8f53906198c service nova] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Detach interface failed, port_id=bb9be0d6-efed-48c5-898c-b87f41e103ab, reason: Instance 8c00240d-5124-4ada-bd4d-4acd39a345c8 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 859.004189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.116159] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780415, 'name': PowerOffVM_Task, 'duration_secs': 0.480285} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.116427] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.116595] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.118123] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da85f0b1-f328-48e1-8b82-3b61689041dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.182365] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dd0240-61ef-4937-9642-4b471ff971ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.191607] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2462ce6-b727-4814-bc10-281fc4c08305 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.198881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.198881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.199328] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleting the datastore file [datastore2] aa7c6967-cd55-47fc-a2f5-db6e8d2e0307 {{(pid=68244) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.200207] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc0348f4-a5de-4472-b45c-5c892fce8cd5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.231672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0eb13f-5067-49c7-b1e2-b5b51020f5e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.235344] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for the task: (returnval){ [ 859.235344] env[68244]: value = "task-2780417" [ 859.235344] env[68244]: _type = "Task" [ 859.235344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.243879] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc879da5-3e43-4b60-93a2-201e2cfdce83 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.251039] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.262040] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 859.264363] env[68244]: WARNING nova.compute.manager [None req-7206246d-2cd0-4fa1-97d4-4ff4af13ae2b tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Image not found during snapshot: nova.exception.ImageNotFound: Image 17fb1f0f-4b70-4dd4-8b94-3df3ff0bc7bf could not be found. 
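The ProviderTree update just above carries the standard Placement inventory fields for this node. Placement sizes the schedulable pool of each resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. As a quick worked check against the logged numbers (a standalone Python sketch; the dict below simply copies the inventory printed above):

# capacity = (total - reserved) * allocation_ratio is how Placement derives
# the pool the scheduler can consume; max_unit still limits one allocation.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 175},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity, "max per allocation:", inv["max_unit"])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- i.e. a 4x CPU overcommit
# and a 512 MB host memory reservation, matching the inventory above.
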
[ 859.299357] env[68244]: DEBUG nova.objects.instance [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lazy-loading 'flavor' on Instance uuid f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.303278] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.758391] env[68244]: DEBUG oslo_vmware.api [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Task: {'id': task-2780417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30879} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.758391] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.758391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.758391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.758391] env[68244]: INFO nova.compute.manager [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Took 1.19 seconds to destroy the instance on the hypervisor. [ 859.758391] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.758391] env[68244]: DEBUG nova.compute.manager [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.758391] env[68244]: DEBUG nova.network.neutron [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.812053] env[68244]: DEBUG nova.scheduler.client.report [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 859.812322] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 76 to 77 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 859.812528] env[68244]: DEBUG nova.compute.provider_tree [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 860.309256] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3f5f6610-d061-47c7-9d9c-f2db3f768e4b tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.370s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.320466] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.095s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.320995] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 
tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 860.327018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.565s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.327018] env[68244]: INFO nova.compute.claims [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.670300] env[68244]: DEBUG nova.network.neutron [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.837140] env[68244]: DEBUG nova.compute.utils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.838553] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 860.839403] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.892333] env[68244]: DEBUG nova.policy [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ded03c2489d4b9a95b4f10a35eed6ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6539e5ec36f0484f85e61fa8b4ef3f9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 860.991582] env[68244]: DEBUG nova.compute.manager [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 860.991582] env[68244]: DEBUG nova.compute.manager [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing instance network info cache due to event network-changed-cb5af382-be2b-47cb-8edb-e23ce944a42d. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 860.991582] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] Acquiring lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.992471] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] Acquired lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.992674] env[68244]: DEBUG nova.network.neutron [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Refreshing network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.175072] env[68244]: INFO nova.compute.manager [-] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Took 1.42 seconds to deallocate network for instance. [ 861.349199] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 861.381897] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Successfully created port: 025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.512620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "92ce8150-982b-4669-b27a-4afd5c85da86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.512827] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.548575] env[68244]: DEBUG nova.compute.manager [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 861.549891] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23629819-d84d-41fc-bca0-6128f74a1aae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.668750] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.669038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.669261] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.669445] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.669632] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.672051] env[68244]: INFO nova.compute.manager [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Terminating instance [ 861.687100] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.764355] env[68244]: DEBUG nova.network.neutron [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updated VIF entry in instance network info cache for port cb5af382-be2b-47cb-8edb-e23ce944a42d. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.764705] env[68244]: DEBUG nova.network.neutron [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [{"id": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "address": "fa:16:3e:99:b9:a1", "network": {"id": "10580f78-1da7-4692-8516-d7746e122ce1", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-677429828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec5340c2b2a440d0ad5a75fd694ad71a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac563aa7-6d7c-4bd5-9241-7b3e11b8f22d", "external-id": "nsx-vlan-transportzone-730", "segmentation_id": 730, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb5af382-be", "ovs_interfaceid": "cb5af382-be2b-47cb-8edb-e23ce944a42d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.848111] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.848399] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.848600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.848782] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.848950] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.851156] env[68244]: INFO nova.compute.manager [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Terminating instance [ 862.025101] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c33ea6-5475-4c65-8940-0c2a6a5a9c06 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.039411] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1e7599-1af2-4358-ac73-c95fe4c182db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.077856] env[68244]: INFO nova.compute.manager [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] instance snapshotting [ 862.078847] env[68244]: DEBUG nova.objects.instance [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.080834] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b0a99c-7658-459a-a2e1-099085fba1fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.092351] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20e14a6-552f-4a92-b49b-7b6d88a09ce8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.110027] env[68244]: DEBUG nova.compute.provider_tree [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.185454] env[68244]: DEBUG nova.compute.manager [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 862.185720] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.186753] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726fd9f0-07f1-432a-9eb6-24e1fe8fd8e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.195709] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.195968] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56c440cb-0862-476d-851c-7d90f3d7df46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.203810] env[68244]: DEBUG oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 862.203810] env[68244]: value = "task-2780418" [ 862.203810] env[68244]: _type = "Task" [ 862.203810] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.214426] env[68244]: DEBUG oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.273741] env[68244]: DEBUG oslo_concurrency.lockutils [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] Releasing lock "refresh_cache-774ce6f8-6273-4f2b-b398-ee8c44d79520" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.273741] env[68244]: DEBUG nova.compute.manager [req-6ee18990-b552-4c27-a597-7875922aad4a req-32a2ebe7-0401-4db1-ae23-242de33fa3d7 service nova] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Received event network-vif-deleted-1266d6b5-36fc-49f9-ab98-42add17e5a24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 862.362400] env[68244]: DEBUG nova.compute.manager [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 862.362639] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.363581] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551fc4bb-85dc-4303-b70a-7998041ebb77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.367673] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 862.375836] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.376116] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b084fee-9036-4971-a7e1-030fa8fb62dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.383796] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 862.383796] env[68244]: value = "task-2780419" [ 862.383796] env[68244]: _type = "Task" [ 862.383796] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.395381] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780419, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.397842] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 862.398081] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.398238] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 862.398437] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.398615] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 862.398765] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 862.398994] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 862.399196] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 862.399369] env[68244]: DEBUG 
nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 862.399530] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 862.399741] env[68244]: DEBUG nova.virt.hardware [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.400562] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6d77cb-e3b0-452c-8aa7-e1ceb4b6d086 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.409898] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab069a16-cd4c-426b-a13b-76dd161c8623 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.588854] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cccba22-f8ea-48f2-8695-c3a108ef5e44 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.608577] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a990223d-26be-42a0-b37b-20b6428af2d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.612519] env[68244]: DEBUG nova.scheduler.client.report [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.717696] env[68244]: DEBUG oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780418, 'name': PowerOffVM_Task, 'duration_secs': 0.288882} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.717696] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.717696] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.717696] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43ead318-55a5-445c-9de2-114238cd41b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.757669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "91232cad-54b3-45af-bb54-af268de182fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.758082] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.801789] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.801887] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.802214] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleting the datastore file [datastore2] 2d9dbf75-992d-4932-bd5d-84462494ebe8 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.802555] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8075e80-5223-416f-8d83-697774a2c5d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.811764] env[68244]: DEBUG 
oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 862.811764] env[68244]: value = "task-2780421" [ 862.811764] env[68244]: _type = "Task" [ 862.811764] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.821446] env[68244]: DEBUG oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.895020] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780419, 'name': PowerOffVM_Task, 'duration_secs': 0.261845} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.895375] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.895495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.895779] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b44b4c9-ec9d-4315-ae87-ea6c475c48c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.983030] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.983356] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.983582] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleting the datastore file [datastore2] 774ce6f8-6273-4f2b-b398-ee8c44d79520 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.983913] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51e63a18-4388-4a71-aaf2-b295cba25720 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.991813] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 862.991813] env[68244]: value = "task-2780423" [ 862.991813] env[68244]: _type = "Task" [ 862.991813] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.000918] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.124827] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.126708] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.131512] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 863.131512] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.416s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.131512] env[68244]: DEBUG nova.objects.instance [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lazy-loading 'resources' on Instance uuid 086dda59-4bd2-4ca2-a758-c120f1271f42 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.132613] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6eaaa135-c5eb-4999-8016-1061eed608ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.140036] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 863.140036] env[68244]: value = "task-2780424" [ 863.140036] env[68244]: _type = "Task" [ 863.140036] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.151873] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780424, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.187329] env[68244]: DEBUG nova.compute.manager [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Received event network-vif-plugged-025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 863.187582] env[68244]: DEBUG oslo_concurrency.lockutils [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] Acquiring lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.187790] env[68244]: DEBUG oslo_concurrency.lockutils [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.187983] env[68244]: DEBUG oslo_concurrency.lockutils [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.188199] env[68244]: DEBUG nova.compute.manager [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] No waiting events found dispatching network-vif-plugged-025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.188381] env[68244]: WARNING nova.compute.manager [req-687eb90c-c74c-4e05-af2c-706a909506ed req-8cf0cfa9-ef32-4f48-8ac6-920b0b8809dc service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Received unexpected event network-vif-plugged-025021b3-9cd9-459b-929d-8c53b5721b24 for instance with vm_state building and task_state spawning. [ 863.321668] env[68244]: DEBUG oslo_vmware.api [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31354} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.321927] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.322125] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.322303] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.323021] env[68244]: INFO nova.compute.manager [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 863.323021] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.323021] env[68244]: DEBUG nova.compute.manager [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.323021] env[68244]: DEBUG nova.network.neutron [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.340727] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Successfully updated port: 025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.505600] env[68244]: DEBUG oslo_vmware.api [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208552} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.506059] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.506297] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.506580] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.506840] env[68244]: INFO nova.compute.manager [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Took 1.14 seconds to destroy the instance on the hypervisor. [ 863.507141] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.507369] env[68244]: DEBUG nova.compute.manager [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.507494] env[68244]: DEBUG nova.network.neutron [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.632519] env[68244]: DEBUG nova.compute.utils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.633905] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 863.634147] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.652437] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780424, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.685172] env[68244]: DEBUG nova.compute.manager [req-d2a8dea2-8f91-4729-9c57-97b3e98d4634 req-1cb406ce-3147-4469-a0e1-83c96e53dc83 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Received event network-vif-deleted-62558620-9882-40c5-888d-85620dd9c6d9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 863.685172] env[68244]: INFO nova.compute.manager [req-d2a8dea2-8f91-4729-9c57-97b3e98d4634 req-1cb406ce-3147-4469-a0e1-83c96e53dc83 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Neutron deleted interface 62558620-9882-40c5-888d-85620dd9c6d9; detaching it from the instance and deleting it from the info cache [ 863.685172] env[68244]: DEBUG nova.network.neutron [req-d2a8dea2-8f91-4729-9c57-97b3e98d4634 req-1cb406ce-3147-4469-a0e1-83c96e53dc83 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.687746] env[68244]: DEBUG nova.policy [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c5aa01e2cf846d38e5a5020fe0c8ed7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f7f8ce7041b4550a74a2fb6fe5b4c6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.843860] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.844224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.844508] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 
874d6895-0f3d-4a99-b27a-cad627ddeecd] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.932948] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "6915d271-8346-41b5-a75b-2188fd3b57d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.933444] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.933695] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.933961] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.934205] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.939904] env[68244]: INFO nova.compute.manager [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Terminating instance [ 863.988963] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Successfully created port: baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.083796] env[68244]: DEBUG nova.network.neutron [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.138533] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 
tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.155433] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780424, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.169527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5361fa9-8c0a-41d0-b1ef-84ee6f14dc5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.178190] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629929e0-dcd5-4012-b1cf-e28cadb528eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.208966] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8261cb7-0775-4d6a-ac0a-ae281367045f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.211274] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c744b54-373e-438a-bba3-1a4731f0f6ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.220586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0925772c-a94a-4f72-a06e-9dea3fc9caa2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.227053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0de04b-b0a4-401e-ac9f-51a27e8c74a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.247661] env[68244]: DEBUG nova.compute.provider_tree [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.260304] env[68244]: DEBUG nova.compute.manager [req-d2a8dea2-8f91-4729-9c57-97b3e98d4634 req-1cb406ce-3147-4469-a0e1-83c96e53dc83 service nova] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Detach interface failed, port_id=62558620-9882-40c5-888d-85620dd9c6d9, reason: Instance 2d9dbf75-992d-4932-bd5d-84462494ebe8 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 864.283570] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Successfully created port: eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.379795] env[68244]: DEBUG nova.network.neutron [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.410453] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.449774] env[68244]: DEBUG nova.compute.manager [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 864.450186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.451080] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af91a161-03c4-407e-87f5-6f27e7ec5e63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.460641] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.462900] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00da809b-1b5a-4b02-8642-ce8ea605378c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.469935] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 864.469935] env[68244]: value = "task-2780425" [ 864.469935] env[68244]: _type = "Task" [ 864.469935] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.477932] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780425, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.587960] env[68244]: INFO nova.compute.manager [-] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Took 1.26 seconds to deallocate network for instance. [ 864.635830] env[68244]: DEBUG nova.network.neutron [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Updating instance_info_cache with network_info: [{"id": "025021b3-9cd9-459b-929d-8c53b5721b24", "address": "fa:16:3e:50:a0:ed", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025021b3-9c", "ovs_interfaceid": "025021b3-9cd9-459b-929d-8c53b5721b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.661058] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780424, 'name': CreateSnapshot_Task, 'duration_secs': 1.411367} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.664646] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 864.665516] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4631baf-2fde-4fdd-b87f-ed2696a26aba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.762875] env[68244]: DEBUG nova.scheduler.client.report [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.882704] env[68244]: INFO nova.compute.manager [-] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Took 1.37 seconds to deallocate network for instance. [ 864.980556] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780425, 'name': PowerOffVM_Task, 'duration_secs': 0.318727} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.980835] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 864.980995] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 864.981376] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d328a3b4-bd47-4c24-88fe-a987f095e2c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.072035] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.072035] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.072035] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleting the datastore file [datastore1] 6915d271-8346-41b5-a75b-2188fd3b57d1 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.072035] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f455133d-c416-471e-b317-4a906b28f0aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.079277] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 865.079277] env[68244]: value = "task-2780427" [ 865.079277] env[68244]: _type = "Task" [ 865.079277] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.088443] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780427, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.094588] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.138414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.138873] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Instance network_info: |[{"id": "025021b3-9cd9-459b-929d-8c53b5721b24", "address": "fa:16:3e:50:a0:ed", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025021b3-9c", "ovs_interfaceid": "025021b3-9cd9-459b-929d-8c53b5721b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 865.139334] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:a0:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '025021b3-9cd9-459b-929d-8c53b5721b24', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.148829] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.149594] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.149917] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8685f92-9c8c-4dbe-b2af-349fa1c71142 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.166898] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.175252] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.175252] env[68244]: value = "task-2780428" [ 865.175252] env[68244]: _type = "Task" [ 865.175252] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.187410] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 865.192969] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1e9f32dc-6da1-49f8-b1b0-ae3ca2bc6b17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.196341] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780428, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.202473] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 865.202473] env[68244]: value = "task-2780429" [ 865.202473] env[68244]: _type = "Task" [ 865.202473] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.204940] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.209076] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.209486] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.209562] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.209673] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.209893] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.210164] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.210349] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.210535] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 
tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.210730] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.210912] env[68244]: DEBUG nova.virt.hardware [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.211847] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d150356-690d-4076-b9d0-66be503fbd58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.227196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e7b435-e78b-42f9-9cdc-b914be9c787e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.230488] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.234945] env[68244]: DEBUG nova.compute.manager [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Received event network-changed-025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 865.235170] env[68244]: DEBUG nova.compute.manager [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Refreshing instance network info cache due to event network-changed-025021b3-9cd9-459b-929d-8c53b5721b24. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 865.235458] env[68244]: DEBUG oslo_concurrency.lockutils [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] Acquiring lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.235659] env[68244]: DEBUG oslo_concurrency.lockutils [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] Acquired lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.235842] env[68244]: DEBUG nova.network.neutron [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Refreshing network info cache for port 025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.269188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.272841] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.487s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.273339] env[68244]: DEBUG nova.objects.instance [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lazy-loading 'resources' on Instance uuid 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.293969] env[68244]: INFO nova.scheduler.client.report [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted allocations for instance 086dda59-4bd2-4ca2-a758-c120f1271f42 [ 865.392471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.591483] env[68244]: DEBUG oslo_vmware.api [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184367} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.591860] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.592235] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 865.592475] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.592768] env[68244]: INFO nova.compute.manager [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 865.593496] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.593496] env[68244]: DEBUG nova.compute.manager [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 865.593616] env[68244]: DEBUG nova.network.neutron [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 865.684943] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780428, 'name': CreateVM_Task, 'duration_secs': 0.349242} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.685160] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.685887] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.686066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.686381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 865.686660] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5e2a25b-c252-41e9-b786-5af920ca3c75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.691836] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 865.691836] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f1f9a7-a0a9-3a83-589b-2ab172b3fa08" [ 865.691836] env[68244]: _type = "Task" [ 865.691836] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.701633] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f1f9a7-a0a9-3a83-589b-2ab172b3fa08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.717084] env[68244]: DEBUG nova.compute.manager [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-vif-plugged-baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 865.717298] env[68244]: DEBUG oslo_concurrency.lockutils [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] Acquiring lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.717528] env[68244]: DEBUG oslo_concurrency.lockutils [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.717698] env[68244]: DEBUG oslo_concurrency.lockutils [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.717863] env[68244]: DEBUG nova.compute.manager [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] No waiting events found dispatching network-vif-plugged-baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.718037] env[68244]: WARNING nova.compute.manager [req-64cbcf4e-e976-4b00-850a-59cd71307d1f req-19bde7a9-4194-40fb-9943-7cb53ea1b7cc service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received unexpected event network-vif-plugged-baebfb02-7f73-4cd8-a535-e4630409c5c8 for instance with vm_state building and task_state spawning. [ 865.724169] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.756827] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Successfully updated port: baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.803871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5e0b4627-9032-42f4-b4dd-88cf3b4f0e8f tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "086dda59-4bd2-4ca2-a758-c120f1271f42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.837s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.175429] env[68244]: DEBUG nova.network.neutron [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Updated VIF entry in instance network info cache for port 025021b3-9cd9-459b-929d-8c53b5721b24. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.175868] env[68244]: DEBUG nova.network.neutron [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Updating instance_info_cache with network_info: [{"id": "025021b3-9cd9-459b-929d-8c53b5721b24", "address": "fa:16:3e:50:a0:ed", "network": {"id": "5a2eb223-d5e4-4c0d-a3ce-a5281dae0407", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-53905553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6539e5ec36f0484f85e61fa8b4ef3f9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025021b3-9c", "ovs_interfaceid": "025021b3-9cd9-459b-929d-8c53b5721b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.209959] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f1f9a7-a0a9-3a83-589b-2ab172b3fa08, 'name': SearchDatastore_Task, 'duration_secs': 0.010748} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.210359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.210661] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.210959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.211404] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.211404] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.211740] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec7a4d1d-0de3-4366-8358-a7602dd87835 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.230665] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.232044] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.232241] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.233086] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73823128-4432-4770-95b2-151120913c31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.242037] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 866.242037] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7f1f1-4cfb-5580-d532-ac5f31d6ce5a" [ 866.242037] env[68244]: _type = "Task" [ 866.242037] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.251655] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7f1f1-4cfb-5580-d532-ac5f31d6ce5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.280025] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ec9a5b-195e-4d2c-96de-9cc9c09580a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.287865] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f1826e-b05a-4b37-b2a9-cfa800a95964 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.325752] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089a9dae-1458-4922-9f46-d6393bfef2ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.337027] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e948bc41-e17f-4b0a-9b1b-a495f483a9b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.349218] env[68244]: DEBUG nova.compute.provider_tree [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.484886] env[68244]: DEBUG nova.network.neutron [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.679423] env[68244]: DEBUG oslo_concurrency.lockutils [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] Releasing lock "refresh_cache-874d6895-0f3d-4a99-b27a-cad627ddeecd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.679731] env[68244]: DEBUG nova.compute.manager [req-1f15f9be-0f54-490e-9bb2-0ac9c3f773f9 req-5e2e3ee7-4b07-4216-918b-ff5a91c22c5d service nova] 
[instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Received event network-vif-deleted-cb5af382-be2b-47cb-8edb-e23ce944a42d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 866.724008] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.753375] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7f1f1-4cfb-5580-d532-ac5f31d6ce5a, 'name': SearchDatastore_Task, 'duration_secs': 0.011445} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.754366] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce14044d-79fd-46f6-96c1-e3c263f6c76b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.761023] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 866.761023] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52281b15-c96d-c805-060a-257650e3e8d6" [ 866.761023] env[68244]: _type = "Task" [ 866.761023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.769169] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52281b15-c96d-c805-060a-257650e3e8d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.854184] env[68244]: DEBUG nova.scheduler.client.report [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.986609] env[68244]: INFO nova.compute.manager [-] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Took 1.39 seconds to deallocate network for instance. [ 867.229263] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task} progress is 95%. 
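The "Inventory has not changed for provider ... based on inventory data" record above carries the provider's full resource view. As a rough illustration (not Nova code), placement treats allocatable capacity per resource class as (total - reserved) * allocation_ratio:

    # Illustration only: capacity implied by the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: %.0f allocatable" % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400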
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.271997] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52281b15-c96d-c805-060a-257650e3e8d6, 'name': SearchDatastore_Task, 'duration_secs': 0.027664} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.272307] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.272577] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 874d6895-0f3d-4a99-b27a-cad627ddeecd/874d6895-0f3d-4a99-b27a-cad627ddeecd.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.272848] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a5f28e7-4b49-42af-aba1-9e6076402f33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.279933] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 867.279933] env[68244]: value = "task-2780430" [ 867.279933] env[68244]: _type = "Task" [ 867.279933] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.288782] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780430, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.291345] env[68244]: DEBUG nova.compute.manager [req-9ba2c5bf-7709-49d9-bd12-fe7d1f084e19 req-45d35d38-d01c-4e34-8dcb-1ce477f46318 service nova] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Received event network-vif-deleted-7179ec75-5350-44a8-97c4-c2a8d408a496 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 867.359543] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.086s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.362991] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.281s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.366317] env[68244]: INFO nova.compute.claims [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.386349] env[68244]: INFO nova.scheduler.client.report [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Deleted allocations for instance 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f [ 867.496834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.728794] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780429, 'name': CloneVM_Task, 'duration_secs': 2.267083} completed successfully. 
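The repeated "Task: {...} progress is N%" lines come from oslo.vmware's wait_for_task/_poll_task loop reporting progress until the vCenter task finishes. A minimal sketch of such a polling loop; get_task_info() is a hypothetical accessor, not the real oslo.vmware API.

    # Minimal polling sketch for the "progress is N%" pattern above.
    import time

    def wait_for_task(task, get_task_info, interval=0.5):
        while True:
            info = get_task_info(task)
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error'))
            # Still queued/running: report progress and poll again.
            print("Task %s progress is %s%%" % (task, info.get('progress', 0)))
            time.sleep(interval)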
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.729151] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created linked-clone VM from snapshot [ 867.729992] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0be6303-fe97-4441-94d1-9c67d2720ba3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.738083] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploading image b821da70-f5be-4fe6-8e00-4567c035cf0f {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 867.755870] env[68244]: DEBUG nova.compute.manager [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-changed-baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 867.756132] env[68244]: DEBUG nova.compute.manager [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Refreshing instance network info cache due to event network-changed-baebfb02-7f73-4cd8-a535-e4630409c5c8. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 867.756408] env[68244]: DEBUG oslo_concurrency.lockutils [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] Acquiring lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.756582] env[68244]: DEBUG oslo_concurrency.lockutils [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] Acquired lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.756744] env[68244]: DEBUG nova.network.neutron [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Refreshing network info cache for port baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.767704] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 867.767704] env[68244]: value = "vm-559019" [ 867.767704] env[68244]: _type = "VirtualMachine" [ 867.767704] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 867.768748] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-222385ff-3cb9-40a6-8501-8b73506f1482 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.777296] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease: (returnval){ [ 867.777296] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256cd10-7e50-1d1c-3817-ae8f210f91c3" [ 867.777296] env[68244]: _type = "HttpNfcLease" [ 867.777296] env[68244]: } obtained for exporting VM: (result){ [ 867.777296] env[68244]: value = "vm-559019" [ 867.777296] env[68244]: _type = "VirtualMachine" [ 867.777296] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 867.777296] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the lease: (returnval){ [ 867.777296] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256cd10-7e50-1d1c-3817-ae8f210f91c3" [ 867.777296] env[68244]: _type = "HttpNfcLease" [ 867.777296] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 867.787800] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 867.787800] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256cd10-7e50-1d1c-3817-ae8f210f91c3" [ 867.787800] env[68244]: _type = "HttpNfcLease" [ 867.787800] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 867.791238] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780430, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.897634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3fca0942-e20a-476a-bdc7-3cb81d9fde28 tempest-ListServersNegativeTestJSON-732707722 tempest-ListServersNegativeTestJSON-732707722-project-member] Lock "4c394e1d-8fef-4b7a-ac9f-550f263c1d7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.816s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.206374] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Successfully updated port: eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.290624] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 868.290624] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256cd10-7e50-1d1c-3817-ae8f210f91c3" [ 868.290624] env[68244]: _type = "HttpNfcLease" [ 868.290624] env[68244]: } is ready. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 868.294205] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 868.294205] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256cd10-7e50-1d1c-3817-ae8f210f91c3" [ 868.294205] env[68244]: _type = "HttpNfcLease" [ 868.294205] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 868.294480] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513316} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.295281] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fbb67b-f263-4f0e-8fc2-7a07e8de2f72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.297685] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 874d6895-0f3d-4a99-b27a-cad627ddeecd/874d6895-0f3d-4a99-b27a-cad627ddeecd.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.297890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.301155] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db0f53a5-b77a-42c4-832f-e52877604cb2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.305533] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 868.305728] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk for reading. 
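The lease records above (lease obtained, "is initializing", "is ready", VMDK URL found, URL opened for reading) outline the stream-optimized image export after the linked-clone snapshot. A rough sketch of that flow; get_lease_state(), get_lease_info() and open_url() are hypothetical helpers, not oslo.vmware calls.

    # Rough sketch of the HttpNfcLease export flow traced above.
    import time

    def export_first_vmdk(lease, get_lease_state, get_lease_info, open_url):
        while get_lease_state(lease) == 'initializing':
            time.sleep(0.5)
        if get_lease_state(lease) != 'ready':
            raise RuntimeError('export lease never became ready')
        info = get_lease_info(lease)
        # The lease info lists one device URL per exported disk.
        vmdk_urls = [u for u in info['device_urls'] if u.endswith('.vmdk')]
        return open_url(vmdk_urls[0])   # read handle to stream the image out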
{{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 868.308111] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 868.308111] env[68244]: value = "task-2780432" [ 868.308111] env[68244]: _type = "Task" [ 868.308111] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.372360] env[68244]: DEBUG nova.network.neutron [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.383409] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073725} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.383987] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.384841] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79374be-50c5-44cc-b1a1-12c23db5a43e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.406647] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 874d6895-0f3d-4a99-b27a-cad627ddeecd/874d6895-0f3d-4a99-b27a-cad627ddeecd.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.411614] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2786ebc6-d1d2-4d33-bc62-aae4caa6ba02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.429444] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-09ab1283-a606-46d4-9833-59e5b11a3626 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.434932] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 868.434932] env[68244]: value = "task-2780433" [ 868.434932] env[68244]: _type = "Task" [ 868.434932] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.442841] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.530504] env[68244]: DEBUG nova.network.neutron [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.713099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.900055] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36762d4-ab70-42fa-b675-fc2c04030a4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.908895] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee7f23c-e0a4-4c59-bcb3-4d8440f79c11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.948696] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8732a03-266a-401f-aeea-9701c9d379d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.960568] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1f4178-bc9c-4e9d-bf48-1c49302d36cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.964531] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780433, 'name': ReconfigVM_Task} progress is 14%. 
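Between the CopyVirtualDisk_Task, the "Extending root virtual disk" ExtendVirtualDisk_Task and the ReconfigVM_Task above, the driver is assembling the instance's root disk. A compressed sketch of that ordering, with the three task wrappers passed in as hypothetical callables:

    # Sketch of the root-disk sequence above: copy the cached VMDK into the
    # instance directory, grow it to the flavor's root size, then attach it.
    def prepare_root_disk(copy_virtual_disk, extend_virtual_disk, attach_disk_to_vm,
                          cache_vmdk, instance_vmdk, root_size_kb, vm_ref):
        copy_virtual_disk(cache_vmdk, instance_vmdk)           # CopyVirtualDisk_Task
        if root_size_kb:
            extend_virtual_disk(instance_vmdk, root_size_kb)   # ExtendVirtualDisk_Task
        attach_disk_to_vm(vm_ref, instance_vmdk)               # ReconfigVM_Task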
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.979688] env[68244]: DEBUG nova.compute.provider_tree [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.032994] env[68244]: DEBUG oslo_concurrency.lockutils [req-b6de6dce-4ba8-45d8-8fa5-4a00938f2d9a req-6428d0ae-f8a0-4fb3-9967-681d0c64264e service nova] Releasing lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.033649] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.033839] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.333335] env[68244]: DEBUG nova.compute.manager [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-vif-plugged-eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.333335] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Acquiring lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.333335] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.333335] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.333761] env[68244]: DEBUG nova.compute.manager [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] No waiting events found dispatching network-vif-plugged-eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.333841] env[68244]: WARNING nova.compute.manager 
[req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received unexpected event network-vif-plugged-eb9064b8-063e-4e1c-9628-f254f4758e7e for instance with vm_state building and task_state spawning. [ 869.334680] env[68244]: DEBUG nova.compute.manager [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-changed-eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.334680] env[68244]: DEBUG nova.compute.manager [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Refreshing instance network info cache due to event network-changed-eb9064b8-063e-4e1c-9628-f254f4758e7e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 869.334680] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Acquiring lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.455198] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.484044] env[68244]: DEBUG nova.scheduler.client.report [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.599802] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance cache missing network info. 
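The network-vif-plugged / network-changed records above show the compute manager reconciling Neutron events against waiters registered by the spawning thread; when nothing is waiting it logs the "Received unexpected event" warning, and network-changed triggers a network info cache refresh. A simplified sketch, with pop_waiter() and refresh_network_cache() as hypothetical helpers:

    # Simplified sketch of the external-event dispatch visible above.
    import logging
    LOG = logging.getLogger(__name__)

    def handle_external_event(instance, event, pop_waiter, refresh_network_cache):
        if event['name'] == 'network-vif-plugged':
            waiter = pop_waiter(instance, event)
            if waiter is None:
                LOG.warning('Received unexpected event %s for instance with '
                            'vm_state %s', event['key'], instance['vm_state'])
            else:
                waiter.send(event)
        elif event['name'] in ('network-changed', 'network-vif-deleted'):
            refresh_network_cache(instance, event.get('tag'))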
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.946517] env[68244]: DEBUG nova.network.neutron [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Updating instance_info_cache with network_info: [{"id": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "address": "fa:16:3e:57:02:28", "network": {"id": "26c6bbde-aaf8-4f77-a991-2a6099666e81", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78400576", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaebfb02-7f", "ovs_interfaceid": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "address": "fa:16:3e:09:65:4e", "network": {"id": "d6924dbd-44bb-496d-9693-886514b33949", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-193086191", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb9064b8-06", "ovs_interfaceid": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.958489] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780433, 'name': ReconfigVM_Task, 'duration_secs': 1.356672} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.958947] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 874d6895-0f3d-4a99-b27a-cad627ddeecd/874d6895-0f3d-4a99-b27a-cad627ddeecd.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.959792] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89fd23e0-d2f1-4478-a8ed-a1f0f12f6947 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.970037] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 869.970037] env[68244]: value = "task-2780434" [ 869.970037] env[68244]: _type = "Task" [ 869.970037] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.977140] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780434, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.989434] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.992162] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Start building networks asynchronously for instance. 
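The "compute_resources" lock lines above (claims waiting tens of seconds, then held for a couple of seconds) reflect the resource tracker serializing instance claims on the host. A toy analogue of that pattern with oslo.concurrency's synchronized decorator; the class and its bookkeeping are invented for illustration.

    # Toy analogue of the "compute_resources" serialization above.
    from oslo_concurrency import lockutils

    class MiniResourceTracker(object):
        def __init__(self, free_vcpus):
            self.free_vcpus = free_vcpus

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, requested_vcpus):
            if requested_vcpus > self.free_vcpus:
                raise RuntimeError('insufficient vCPUs for claim')
            self.free_vcpus -= requested_vcpus
            return self.free_vcpus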
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 869.996564] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.307s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.998282] env[68244]: INFO nova.compute.claims [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.454081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.454801] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance network_info: |[{"id": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "address": "fa:16:3e:57:02:28", "network": {"id": "26c6bbde-aaf8-4f77-a991-2a6099666e81", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78400576", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaebfb02-7f", "ovs_interfaceid": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "address": "fa:16:3e:09:65:4e", "network": {"id": "d6924dbd-44bb-496d-9693-886514b33949", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-193086191", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", 
"segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb9064b8-06", "ovs_interfaceid": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.455275] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Acquired lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.455382] env[68244]: DEBUG nova.network.neutron [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Refreshing network info cache for port eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.456958] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:02:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4fcde7-8926-402a-a9b7-4878d2bc1cf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'baebfb02-7f73-4cd8-a535-e4630409c5c8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:65:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '419a5b3f-4c6f-4168-9def-746b4d8c5c24', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb9064b8-063e-4e1c-9628-f254f4758e7e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.467018] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.467635] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.469512] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0a94ce4-f448-401d-8d64-a53161c227db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.498822] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.498822] env[68244]: value = "task-2780435" [ 870.498822] env[68244]: _type = "Task" [ 870.498822] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.499653] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780434, 'name': Rename_Task, 'duration_secs': 0.149841} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.499851] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.505635] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e97d01d-08c0-4c09-a739-59ca851b4866 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.508297] env[68244]: DEBUG nova.compute.utils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.515106] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 870.515106] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 870.523339] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 870.523339] env[68244]: value = "task-2780436" [ 870.523339] env[68244]: _type = "Task" [ 870.523339] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.532781] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.609985] env[68244]: DEBUG nova.policy [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '395d6679a62746ef8ed6f6f581c22944', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd41b4d274faa4f5a8951d39fa0d0c714', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.009818] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780435, 'name': CreateVM_Task, 'duration_secs': 0.445609} completed successfully. 
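The "Policy check for network:attach_external_network failed" record above is an oslo.policy decision made against the request credentials (a member-role token). A self-contained approximation is sketched below; the "role:admin" default rule string is illustrative and not necessarily Nova's shipped default.

    # Approximation of the policy check above; the default rule is illustrative.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': 'd41b4d274faa4f5a8951d39fa0d0c714'}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    # allowed is False for a member-only token, matching the failed check above.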
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.013021] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.013021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.013021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.013021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.013021] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.014800] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0e60e27-fb0b-41b9-addc-08f408278477 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.025509] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 871.025509] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284ee25-f035-7b41-dd16-60aa136ef0fd" [ 871.025509] env[68244]: _type = "Task" [ 871.025509] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.043440] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780436, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.047767] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284ee25-f035-7b41-dd16-60aa136ef0fd, 'name': SearchDatastore_Task, 'duration_secs': 0.01124} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.052336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.052623] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.053175] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.053539] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.053895] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.057423] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70ec7ad2-b485-4021-a34a-df6d9646711c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.068528] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.068977] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.073042] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c65fa0a1-9ae4-4971-b5b8-7fc5a26b03c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.080968] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 871.080968] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f51e22-b300-d51c-b3db-6850546671fc" [ 871.080968] env[68244]: _type = "Task" [ 871.080968] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.091861] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f51e22-b300-d51c-b3db-6850546671fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.360614] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Successfully created port: 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.464268] env[68244]: DEBUG nova.network.neutron [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Updated VIF entry in instance network info cache for port eb9064b8-063e-4e1c-9628-f254f4758e7e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.464890] env[68244]: DEBUG nova.network.neutron [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Updating instance_info_cache with network_info: [{"id": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "address": "fa:16:3e:57:02:28", "network": {"id": "26c6bbde-aaf8-4f77-a991-2a6099666e81", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-78400576", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4fcde7-8926-402a-a9b7-4878d2bc1cf6", "external-id": "nsx-vlan-transportzone-840", "segmentation_id": 840, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaebfb02-7f", "ovs_interfaceid": "baebfb02-7f73-4cd8-a535-e4630409c5c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "address": "fa:16:3e:09:65:4e", "network": {"id": "d6924dbd-44bb-496d-9693-886514b33949", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-193086191", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3f7f8ce7041b4550a74a2fb6fe5b4c6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb9064b8-06", "ovs_interfaceid": "eb9064b8-063e-4e1c-9628-f254f4758e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.536832] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780436, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.592664] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f51e22-b300-d51c-b3db-6850546671fc, 'name': SearchDatastore_Task, 'duration_secs': 0.012419} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.594982] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c83f5c9-50bd-4814-900f-b0f7650b2543 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.597879] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aea770f6-23a8-47d8-b96a-c4a2df44cef1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.606488] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4932f351-8e99-4325-8fe9-f818943082a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.613449] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 871.613449] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526cd77c-c0ef-f012-af19-db532bb130ca" [ 871.613449] env[68244]: _type = "Task" [ 871.613449] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.647551] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12344b17-e6fd-45ac-8cf8-d5826a6cf377 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.654987] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526cd77c-c0ef-f012-af19-db532bb130ca, 'name': SearchDatastore_Task, 'duration_secs': 0.01679} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.655689] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.655956] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56/b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.656293] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a02f0e9-6d7c-46eb-8964-195a67824da8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.663641] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048b2995-64a7-4f47-8743-6a77e37574cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.669162] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 871.669162] env[68244]: value = "task-2780437" [ 871.669162] env[68244]: _type = "Task" [ 871.669162] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.682248] env[68244]: DEBUG nova.compute.provider_tree [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.689020] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.971560] env[68244]: DEBUG oslo_concurrency.lockutils [req-e54a75b6-807e-403b-8869-fa8514f58818 req-ee7543f9-3276-4941-839e-d885554a5690 service nova] Releasing lock "refresh_cache-b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.025140] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.039020] env[68244]: DEBUG oslo_vmware.api [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780436, 'name': PowerOnVM_Task, 'duration_secs': 1.047286} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.039020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.039020] env[68244]: INFO nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Took 9.67 seconds to spawn the instance on the hypervisor. [ 872.039189] env[68244]: DEBUG nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.039945] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b69c79-efc4-46ba-917e-381a06ea59b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.062988] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 872.063202] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.064028] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 872.064028] env[68244]: DEBUG nova.virt.hardware [None 
req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.064203] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 872.064408] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 872.064646] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 872.064811] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 872.064996] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 872.065288] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 872.065431] env[68244]: DEBUG nova.virt.hardware [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 872.066415] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5ca293-2d04-4bbb-97ee-f1c8ec303302 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.075463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207b9fb0-d432-434f-b61a-e64e6b6179eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.181908] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780437, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.185201] env[68244]: DEBUG nova.scheduler.client.report [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.561655] env[68244]: INFO nova.compute.manager [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Took 42.60 seconds to build instance. [ 872.683296] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667594} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.683296] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56/b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.683296] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.683296] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-241d148f-c37c-4bef-80f5-ac1b098634c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.688989] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 872.688989] env[68244]: value = "task-2780438" [ 872.688989] env[68244]: _type = "Task" [ 872.688989] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.693663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.693823] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 872.697392] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.199s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.699084] env[68244]: INFO nova.compute.claims [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.709396] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780438, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.014495] env[68244]: DEBUG nova.compute.manager [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 873.014849] env[68244]: DEBUG oslo_concurrency.lockutils [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.015440] env[68244]: DEBUG oslo_concurrency.lockutils [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.015856] env[68244]: DEBUG oslo_concurrency.lockutils [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.016120] env[68244]: DEBUG nova.compute.manager [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] No waiting events found dispatching network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 873.016451] env[68244]: WARNING nova.compute.manager [req-a296e631-47eb-45cf-b196-deb8469dd0c4 req-6318e3e0-d8cc-4ec3-b2f6-5c756da460b0 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received unexpected event network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed for instance with vm_state building and task_state spawning. 
[ 873.066619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d818e891-a0a1-4367-8444-9c3c24f947fc tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.190s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.136112] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Successfully updated port: 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.199202] env[68244]: DEBUG nova.compute.utils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.205673] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 873.205797] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.211220] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780438, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07225} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.214130] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.217023] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f4bf13-f876-4796-90d4-b373f5bdad36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.245985] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56/b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.246791] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61b70b07-137a-48da-9933-ef08dee66dfc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.268737] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 873.268737] env[68244]: value = "task-2780439" [ 873.268737] env[68244]: _type = "Task" [ 873.268737] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.278185] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780439, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.287494] env[68244]: DEBUG nova.policy [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3449af18f8384e0382863f3b2d2357ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eab15ba4f32a45d1832ce9d831d62f34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 873.289597] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.290492] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.574022] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.639020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.639278] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.639433] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.713615] env[68244]: DEBUG nova.compute.utils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 873.730086] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 873.783365] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780439, 'name': ReconfigVM_Task, 'duration_secs': 0.327002} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.783822] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Reconfigured VM instance instance-00000033 to attach disk [datastore2] b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56/b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.784412] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e367304d-bc2f-425e-b922-4b2159245764 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.792492] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 873.792492] env[68244]: value = "task-2780440" [ 873.792492] env[68244]: _type = "Task" [ 873.792492] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.804844] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780440, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.823453] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Successfully created port: 2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.105264] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.208293] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.228769] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "184f7694-9cab-4184-a1c0-926763a81baf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.229179] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "184f7694-9cab-4184-a1c0-926763a81baf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.229413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "184f7694-9cab-4184-a1c0-926763a81baf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.229604] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "184f7694-9cab-4184-a1c0-926763a81baf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.229776] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "184f7694-9cab-4184-a1c0-926763a81baf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.239903] env[68244]: INFO nova.compute.manager [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Terminating instance [ 874.305543] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780440, 'name': Rename_Task, 'duration_secs': 0.163112} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.309829] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.310334] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f275f892-ba5b-4625-ab36-991e84daa4a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.317809] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 874.317809] env[68244]: value = "task-2780441" [ 874.317809] env[68244]: _type = "Task" [ 874.317809] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.323770] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ff343-06e2-45c0-97b9-98ce6a599e39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.331881] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780441, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.334775] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ef34c1-828c-490a-966a-698eab888e1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.376917] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22a3664-c70b-40e0-b031-1caf889748c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.385517] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.385825] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.386080] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.386321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.386524] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.389339] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44f18d5-610f-426d-9942-666ae016c0f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.394284] env[68244]: INFO nova.compute.manager [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Terminating instance [ 874.408130] env[68244]: DEBUG nova.compute.provider_tree 
[None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.743425] env[68244]: DEBUG nova.network.neutron [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.745758] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 874.756073] env[68244]: DEBUG nova.compute.manager [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.756356] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.757930] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a110df-7e18-47a8-a3d4-5b673c6cd58e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.777315] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.777315] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb89200a-6658-4aee-be19-f1d9ffdcd175 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.780179] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:20:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1475918212',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-817804054',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 874.780427] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.780586] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 874.780768] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.782121] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e 
tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 874.782240] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 874.782468] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 874.782632] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 874.782801] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 874.782964] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 874.785399] env[68244]: DEBUG nova.virt.hardware [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 874.785399] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0318a61c-557c-44cc-bc1f-da83986173ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.797418] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4080b770-95cc-4b53-83ec-c6921098b1dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.803909] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 874.803909] env[68244]: value = "task-2780442" [ 874.803909] env[68244]: _type = "Task" [ 874.803909] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.828820] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.835732] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780441, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.898766] env[68244]: DEBUG nova.compute.manager [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.898766] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.899660] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7099f142-6d3b-4c3d-aac8-b8da79599708 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.910631] env[68244]: DEBUG nova.scheduler.client.report [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.915731] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.919438] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-821765d7-6759-4748-adfa-37f110bbc6be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.930502] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 874.930502] env[68244]: value = "task-2780443" [ 874.930502] env[68244]: _type = "Task" [ 874.930502] 
env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.945581] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.150617] env[68244]: DEBUG nova.compute.manager [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 875.150954] env[68244]: DEBUG nova.compute.manager [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing instance network info cache due to event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 875.151456] env[68244]: DEBUG oslo_concurrency.lockutils [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.249911] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.250122] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance network_info: |[{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 875.250572] env[68244]: DEBUG 
oslo_concurrency.lockutils [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.250784] env[68244]: DEBUG nova.network.neutron [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.252142] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:f5:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bcf5292-c53f-42bf-97f1-7f616748f9ed', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.261006] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating folder: Project (d41b4d274faa4f5a8951d39fa0d0c714). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 875.262241] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7101d92a-e78d-45d6-9d09-f297fc767d16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.274043] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created folder: Project (d41b4d274faa4f5a8951d39fa0d0c714) in parent group-v558876. [ 875.274265] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating folder: Instances. Parent ref: group-v559021. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 875.274498] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47ad7bd4-2545-41b0-afde-0f47e6bfd1ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.283525] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created folder: Instances in parent group-v559021. [ 875.283813] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.284015] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.284236] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cb6f12b-75f1-4e18-92cc-149570fe550a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.305181] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.305181] env[68244]: value = "task-2780446" [ 875.305181] env[68244]: _type = "Task" [ 875.305181] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.316765] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780446, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.319883] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780442, 'name': PowerOffVM_Task, 'duration_secs': 0.250069} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.320167] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.320337] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.320590] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13eeedd3-066b-49f9-94dc-481bf2e9afc8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.330211] env[68244]: DEBUG oslo_vmware.api [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780441, 'name': PowerOnVM_Task, 'duration_secs': 0.778624} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.330458] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.330657] env[68244]: INFO nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Took 10.16 seconds to spawn the instance on the hypervisor. 
[ 875.330834] env[68244]: DEBUG nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 875.331632] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1afaa7-1714-4f0a-b532-225b4a8e1ac3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.396166] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.396362] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.396592] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleting the datastore file [datastore2] 184f7694-9cab-4184-a1c0-926763a81baf {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.396892] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1507f7bd-4440-4d6a-9b28-ba7166df90c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.405266] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 875.405266] env[68244]: value = "task-2780448" [ 875.405266] env[68244]: _type = "Task" [ 875.405266] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.413982] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780448, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.418267] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.418878] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.421494] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.453s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.421722] env[68244]: DEBUG nova.objects.instance [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lazy-loading 'resources' on Instance uuid 340aa1e7-dc0a-4cba-8979-0c591830e9db {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 875.442271] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780443, 'name': PowerOffVM_Task, 'duration_secs': 0.224097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.442271] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.442271] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.442547] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34e7314f-400d-4563-9f73-84d397098cab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.507648] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.507987] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.508262] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleting the datastore file [datastore2] 874d6895-0f3d-4a99-b27a-cad627ddeecd {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.508593] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6e0b57b-9dca-4c5c-ad69-c4feb25cfd93 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.515062] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for the task: (returnval){ [ 875.515062] env[68244]: value = "task-2780450" [ 875.515062] env[68244]: _type = "Task" [ 875.515062] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.523797] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.821456] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780446, 'name': CreateVM_Task, 'duration_secs': 0.444987} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.821793] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.822351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.822562] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.822882] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 875.823156] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f17e83-db7c-45c7-b5cf-828572721061 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.827981] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 875.827981] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ac81d8-175e-25e3-1c12-c8266f745c10" [ 875.827981] env[68244]: _type = "Task" [ 875.827981] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.836613] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ac81d8-175e-25e3-1c12-c8266f745c10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.848651] env[68244]: INFO nova.compute.manager [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Took 44.12 seconds to build instance. [ 875.916536] env[68244]: DEBUG oslo_vmware.api [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177546} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.916843] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.917051] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.917270] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.917447] env[68244]: INFO nova.compute.manager [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 875.917757] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.917954] env[68244]: DEBUG nova.compute.manager [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.918070] env[68244]: DEBUG nova.network.neutron [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.928031] env[68244]: DEBUG nova.compute.utils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 875.929738] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 875.929912] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 875.993731] env[68244]: DEBUG nova.policy [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13034ebaf2c648ee9b35087ece8fe225', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e4f3ac0127744e4b6d8507c46bce1bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.025983] env[68244]: DEBUG oslo_vmware.api [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Task: {'id': task-2780450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196393} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.028919] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.029140] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.029380] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.029487] env[68244]: INFO nova.compute.manager [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 876.029727] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.031097] env[68244]: DEBUG nova.compute.manager [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.031211] env[68244]: DEBUG nova.network.neutron [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.234108] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Successfully updated port: 2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.275372] env[68244]: DEBUG nova.network.neutron [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updated VIF entry in instance network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.276903] env[68244]: DEBUG nova.network.neutron [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.302411] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.306131] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.342555] env[68244]: DEBUG oslo_vmware.api [None 
req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ac81d8-175e-25e3-1c12-c8266f745c10, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.343370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.343636] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.343882] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.345229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.345229] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.345229] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c490778d-806e-40a8-a956-0031f03095e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.354524] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9fc9b1db-408d-4ad1-9849-0751ee163f81 tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 100.556s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.358250] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.358401] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.359752] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da7883d-fe5f-4974-9056-692f31474367 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.366695] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 876.366695] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5261ff07-e5ac-61d6-2d27-68f1da9a04c7" [ 876.366695] env[68244]: _type = "Task" [ 876.366695] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.375409] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5261ff07-e5ac-61d6-2d27-68f1da9a04c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.432474] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.603882] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd2c09d-d642-4dc3-b7bf-6bd884c53223 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.613056] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Successfully created port: 578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.615848] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2eb778-2122-48ee-8fab-2345c2f1de82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.648650] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e4338d-f07f-4642-ab1c-e4c6765d4150 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.656222] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936843ae-4ff2-41b7-86cc-f0664838e38e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.671148] env[68244]: DEBUG nova.compute.provider_tree [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.739052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.739052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.739129] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.742520] env[68244]: DEBUG nova.compute.manager [req-f957dbab-432f-4b70-8d13-6c50f8955497 req-9e40161f-b731-4b4b-99cb-d31345b8d9e5 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Received event network-vif-deleted-b9e56a35-cb81-41d0-81d4-951205f52fa0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 876.742741] env[68244]: INFO 
nova.compute.manager [req-f957dbab-432f-4b70-8d13-6c50f8955497 req-9e40161f-b731-4b4b-99cb-d31345b8d9e5 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Neutron deleted interface b9e56a35-cb81-41d0-81d4-951205f52fa0; detaching it from the instance and deleting it from the info cache [ 876.742883] env[68244]: DEBUG nova.network.neutron [req-f957dbab-432f-4b70-8d13-6c50f8955497 req-9e40161f-b731-4b4b-99cb-d31345b8d9e5 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.779573] env[68244]: DEBUG oslo_concurrency.lockutils [req-bc01a40a-f14a-4aeb-8805-455b36053ade req-2e14dbd6-6c88-44f1-8fc3-f6e0c5ea6271 service nova] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.820115] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 876.820115] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.860767] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 876.877015] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5261ff07-e5ac-61d6-2d27-68f1da9a04c7, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.879289] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a45023b9-c486-48b9-a178-9b90abcff8cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.884409] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 876.884409] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5289529f-2000-fc83-d9a5-f654d1ec11a8" [ 876.884409] env[68244]: _type = "Task" [ 876.884409] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.894852] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5289529f-2000-fc83-d9a5-f654d1ec11a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.017077] env[68244]: DEBUG nova.network.neutron [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.024217] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 877.025215] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19f8ee3-6989-44ae-9d4c-4da3b4a58d41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.030600] env[68244]: DEBUG nova.network.neutron [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.034016] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 877.034016] env[68244]: ERROR oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk due to incomplete transfer. [ 877.034016] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f7f0a945-09f2-408f-881c-0afd4d2f4bee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.041203] env[68244]: DEBUG oslo_vmware.rw_handles [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527ab823-6075-1688-c661-240655e3a212/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 877.041384] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploaded image b821da70-f5be-4fe6-8e00-4567c035cf0f to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 877.043880] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 877.043880] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-97d65882-7e91-4993-8d1a-f42aca74da61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.051269] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 877.051269] env[68244]: value = "task-2780451" [ 877.051269] env[68244]: _type = "Task" [ 877.051269] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.059526] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780451, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.175060] env[68244]: DEBUG nova.scheduler.client.report [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.179698] env[68244]: DEBUG nova.compute.manager [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Received event network-vif-plugged-2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.179698] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Acquiring lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.179954] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.180182] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.180382] env[68244]: DEBUG nova.compute.manager [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] No waiting events found dispatching network-vif-plugged-2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 877.180590] env[68244]: WARNING nova.compute.manager [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Received unexpected event network-vif-plugged-2a529edd-e384-4bf7-8ab6-a868cc9e2788 for instance with vm_state building and task_state spawning. 
[ 877.180816] env[68244]: DEBUG nova.compute.manager [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Received event network-changed-2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.181033] env[68244]: DEBUG nova.compute.manager [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Refreshing instance network info cache due to event network-changed-2a529edd-e384-4bf7-8ab6-a868cc9e2788. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 877.181267] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Acquiring lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.249209] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d673812-84bd-45e3-9883-93ece2c767f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.259555] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81dce4c-a9d5-4078-b65b-3c9c4c19f679 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.295768] env[68244]: DEBUG nova.compute.manager [req-f957dbab-432f-4b70-8d13-6c50f8955497 req-9e40161f-b731-4b4b-99cb-d31345b8d9e5 service nova] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Detach interface failed, port_id=b9e56a35-cb81-41d0-81d4-951205f52fa0, reason: Instance 184f7694-9cab-4184-a1c0-926763a81baf could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 877.297397] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.324216] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.369728] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.371321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.371321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.371321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.371321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.375021] env[68244]: INFO nova.compute.manager [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Terminating instance [ 877.394818] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5289529f-2000-fc83-d9a5-f654d1ec11a8, 'name': SearchDatastore_Task, 'duration_secs': 0.016089} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.395513] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.395844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.396192] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.396475] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbdda45d-7aaa-4e1e-ba28-268fab944de6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.403298] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 877.403298] env[68244]: value = "task-2780452" [ 877.403298] env[68244]: _type = "Task" [ 877.403298] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.411135] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.442655] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.453394] env[68244]: DEBUG nova.network.neutron [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updating instance_info_cache with network_info: [{"id": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "address": "fa:16:3e:e3:b1:14", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a529edd-e3", "ovs_interfaceid": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=<?>,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:18:49Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 
tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.465245] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.465544] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.465544] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.465612] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.465794] env[68244]: DEBUG nova.virt.hardware [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.466670] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135cd544-6580-48d3-8b34-2619ee8872bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.479567] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391691eb-3bdc-4ad2-b401-17fbb2cb477f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.519931] env[68244]: INFO nova.compute.manager [-] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Took 1.60 seconds to deallocate network for instance. 
[ 877.534514] env[68244]: INFO nova.compute.manager [-] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Took 1.50 seconds to deallocate network for instance. [ 877.561619] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780451, 'name': Destroy_Task, 'duration_secs': 0.336647} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.561742] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroyed the VM [ 877.565021] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 877.565021] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8e6a51a6-b0d8-430a-98f8-70f38f39b6d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.572017] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 877.572017] env[68244]: value = "task-2780453" [ 877.572017] env[68244]: _type = "Task" [ 877.572017] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.583464] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780453, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.683151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.261s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.686774] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.546s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.689408] env[68244]: INFO nova.compute.claims [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.707289] env[68244]: INFO nova.scheduler.client.report [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Deleted allocations for instance 340aa1e7-dc0a-4cba-8979-0c591830e9db [ 877.878254] env[68244]: DEBUG nova.compute.manager [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.878613] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.879354] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03400af-ac8e-4a96-98b7-5ae2d1f2b23c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.887505] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.887809] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03e1fb31-69d2-4512-89ce-4bdd0287ec89 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.894127] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 877.894127] env[68244]: value = "task-2780454" [ 877.894127] env[68244]: _type = "Task" [ 877.894127] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.902193] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.913483] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476557} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.913763] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.914092] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.914274] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a4b7d18-4442-4524-8889-11df426be275 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.922265] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 877.922265] env[68244]: value = "task-2780455" [ 877.922265] env[68244]: _type = "Task" [ 877.922265] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.931201] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780455, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.955997] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.956415] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Instance network_info: |[{"id": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "address": "fa:16:3e:e3:b1:14", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a529edd-e3", "ovs_interfaceid": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 877.956739] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Acquired lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.956939] env[68244]: DEBUG nova.network.neutron [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Refreshing network info cache for port 2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.958246] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:b1:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a529edd-e384-4bf7-8ab6-a868cc9e2788', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.966185] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.969859] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.969859] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-729039d3-6b06-47e1-876f-f0a48bc82ceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.992968] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.992968] env[68244]: value = "task-2780456" [ 877.992968] env[68244]: _type = "Task" [ 877.992968] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.002219] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780456, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.027145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.042729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.084503] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780453, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.217069] env[68244]: DEBUG oslo_concurrency.lockutils [None req-546273f9-6c4b-4f8d-894c-6b6656aba10c tempest-ServerAddressesTestJSON-568627508 tempest-ServerAddressesTestJSON-568627508-project-member] Lock "340aa1e7-dc0a-4cba-8979-0c591830e9db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.163s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.227089] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Successfully updated port: 578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 878.287016] env[68244]: DEBUG nova.network.neutron [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updated VIF entry in instance network info cache for port 2a529edd-e384-4bf7-8ab6-a868cc9e2788. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.287016] env[68244]: DEBUG nova.network.neutron [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updating instance_info_cache with network_info: [{"id": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "address": "fa:16:3e:e3:b1:14", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a529edd-e3", "ovs_interfaceid": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.403240] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780454, 'name': PowerOffVM_Task, 'duration_secs': 0.272351} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.403508] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.403674] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.403924] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0cc0442f-6fcc-415e-b914-658c7990e12e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.431140] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123221} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.431428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.432224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2f28c5-2efd-4090-a8c1-c13c30543465 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.458430] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.458633] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53434596-ed99-47b8-a803-9f7b8ea9b741 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.479512] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 878.479512] env[68244]: value = "task-2780458" [ 878.479512] env[68244]: _type = "Task" [ 878.479512] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.487886] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.504931] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780456, 'name': CreateVM_Task, 'duration_secs': 0.335785} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.506306] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.506642] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.506848] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.507039] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleting the datastore file [datastore2] b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.507752] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.507914] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.508244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.508489] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a7d8b31-64b5-4914-b412-7933b78582b6 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.511032] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e09de5-335e-458a-b894-2b25abb01f85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.515943] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 878.515943] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b9f7c5-d076-3db9-771a-38f2aa062520" [ 878.515943] env[68244]: _type = "Task" [ 878.515943] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.517276] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for the task: (returnval){ [ 878.517276] env[68244]: value = "task-2780459" [ 878.517276] env[68244]: _type = "Task" [ 878.517276] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.529988] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b9f7c5-d076-3db9-771a-38f2aa062520, 'name': SearchDatastore_Task, 'duration_secs': 0.011263} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.532526] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.532776] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.541017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.541017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.541017] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.541017] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.541017] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-788f5ca0-583f-4eec-ab3c-37141b1c2f31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.545743] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.545973] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.546696] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b552f9-31e9-40b4-8849-f59d6e67bc32 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.551623] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 878.551623] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c7c2b-7e90-ea77-9bdc-f778998ade4b" [ 878.551623] env[68244]: _type = "Task" [ 878.551623] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.559451] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c7c2b-7e90-ea77-9bdc-f778998ade4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.580830] env[68244]: DEBUG oslo_vmware.api [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780453, 'name': RemoveSnapshot_Task, 'duration_secs': 0.784522} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.581304] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 878.581630] env[68244]: INFO nova.compute.manager [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 15.99 seconds to snapshot the instance on the hypervisor. [ 878.731263] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.731419] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquired lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.731574] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.788107] env[68244]: DEBUG oslo_concurrency.lockutils [req-14c5a356-e026-4d78-a8d2-584f6468fea0 req-da16f1a5-28ae-4010-84f1-80f159578a36 service nova] Releasing lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.842087] env[68244]: DEBUG nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Received event network-vif-deleted-025021b3-9cd9-459b-929d-8c53b5721b24 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 878.842437] env[68244]: DEBUG nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Received event network-vif-plugged-578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 878.842490] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] Acquiring lock "df4674a2-87de-4507-950a-5941fae93aab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.842706] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] Lock 
"df4674a2-87de-4507-950a-5941fae93aab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.842822] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] Lock "df4674a2-87de-4507-950a-5941fae93aab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.842973] env[68244]: DEBUG nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] No waiting events found dispatching network-vif-plugged-578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 878.843150] env[68244]: WARNING nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Received unexpected event network-vif-plugged-578b7668-06da-421b-9436-cfdb36e12b83 for instance with vm_state building and task_state spawning. [ 878.843306] env[68244]: DEBUG nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Received event network-changed-578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 878.843454] env[68244]: DEBUG nova.compute.manager [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Refreshing instance network info cache due to event network-changed-578b7668-06da-421b-9436-cfdb36e12b83. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 878.843615] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] Acquiring lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.992256] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780458, 'name': ReconfigVM_Task, 'duration_secs': 0.266215} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.992560] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfigured VM instance instance-00000034 to attach disk [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.993168] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3bedd0c-b10d-416e-8f59-bfae7a887605 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.000151] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 879.000151] env[68244]: value = "task-2780460" [ 879.000151] env[68244]: _type = "Task" [ 879.000151] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.017277] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780460, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.030936] env[68244]: DEBUG oslo_vmware.api [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Task: {'id': task-2780459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142489} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.031010] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.031161] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.031419] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.031596] env[68244]: INFO nova.compute.manager [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 879.031872] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.033528] env[68244]: DEBUG nova.compute.manager [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.033639] env[68244]: DEBUG nova.network.neutron [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.060958] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526c7c2b-7e90-ea77-9bdc-f778998ade4b, 'name': SearchDatastore_Task, 'duration_secs': 0.008042} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.061919] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-321e1e2a-9060-4445-ab3d-0e2c2214c701 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.070020] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 879.070020] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5242099f-c193-63ba-eb0f-3181b78f55b0" [ 879.070020] env[68244]: _type = "Task" [ 879.070020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.079270] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5242099f-c193-63ba-eb0f-3181b78f55b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.156178] env[68244]: DEBUG nova.compute.manager [None req-37a58ed9-437a-4fca-ac9d-6a57c1cfc36e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Found 1 images (rotation: 2) {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 879.199456] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b99d86-6668-4687-b951-669a75eee483 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.207771] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0830eb1-f658-4974-ac21-9dbc4c250cf6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.241504] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6991c37-dd0b-4e09-9657-5fee498fd22e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.249708] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780ba588-043e-4dea-ad51-433ae01e74a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.265572] env[68244]: DEBUG nova.compute.provider_tree [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.282929] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.486751] env[68244]: DEBUG nova.network.neutron [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Updating instance_info_cache with network_info: [{"id": "578b7668-06da-421b-9436-cfdb36e12b83", "address": "fa:16:3e:74:90:ab", "network": {"id": "b44577f3-a481-45d1-9d8a-22ab788e1d4c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1053393449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e4f3ac0127744e4b6d8507c46bce1bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap578b7668-06", "ovs_interfaceid": "578b7668-06da-421b-9436-cfdb36e12b83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.517396] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780460, 'name': Rename_Task, 'duration_secs': 0.140971} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.517396] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.517396] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-595ee0cf-690e-468d-af5c-644afadbf910 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.526590] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 879.526590] env[68244]: value = "task-2780461" [ 879.526590] env[68244]: _type = "Task" [ 879.526590] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.536264] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780461, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.582615] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5242099f-c193-63ba-eb0f-3181b78f55b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009306} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.585644] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.585644] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ffa17045-fadf-47d7-9c3b-19d0d54de3fc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.585644] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aafef568-ce00-41be-9ec9-d125e8c1c3fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.589592] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 879.589592] env[68244]: value = "task-2780462" [ 879.589592] env[68244]: _type = "Task" [ 879.589592] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.600411] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780462, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.773223] env[68244]: DEBUG nova.scheduler.client.report [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 879.780287] env[68244]: DEBUG nova.compute.manager [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.784448] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0037dd59-d266-4d45-b0e3-5b8f1f7b1123 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.989764] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Releasing lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.990069] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Instance network_info: |[{"id": "578b7668-06da-421b-9436-cfdb36e12b83", "address": "fa:16:3e:74:90:ab", "network": {"id": "b44577f3-a481-45d1-9d8a-22ab788e1d4c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1053393449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e4f3ac0127744e4b6d8507c46bce1bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap578b7668-06", "ovs_interfaceid": "578b7668-06da-421b-9436-cfdb36e12b83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 879.991408] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f 
service nova] Acquired lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.991408] env[68244]: DEBUG nova.network.neutron [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Refreshing network info cache for port 578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.991810] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:90:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '578b7668-06da-421b-9436-cfdb36e12b83', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.000095] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Creating folder: Project (3e4f3ac0127744e4b6d8507c46bce1bc). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 880.003498] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2f7204e-7788-4b9e-aac9-3d512977a67a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.015450] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Created folder: Project (3e4f3ac0127744e4b6d8507c46bce1bc) in parent group-v558876. [ 880.015668] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Creating folder: Instances. Parent ref: group-v559025. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 880.015913] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed5069d8-e787-4c35-b42f-e487a0f2a6f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.032704] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Created folder: Instances in parent group-v559025. [ 880.033034] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
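The "Acquiring lock" / "Acquired lock ... waited" / "Releasing lock" lines around this point (for "refresh_cache-<uuid>" and the datastore image-cache paths) come from oslo.concurrency named locks. A minimal sketch of the same primitive, assuming oslo.concurrency is installed; the lock names are taken from the log, while the cache dict and function bodies are purely illustrative.

from oslo_concurrency import lockutils

# Illustrative: serialize refreshes of one instance's network-info cache.
_nw_info_cache = {}

def refresh_network_cache(instance_uuid: str, port_id: str):
    lock_name = f"refresh_cache-{instance_uuid}"
    with lockutils.lock(lock_name):
        # Only one worker at a time rebuilds this cache entry.
        _nw_info_cache[instance_uuid] = {"port_id": port_id}

# Decorator form, as used for locks like "compute_resources" elsewhere
# in this log; the body here is a hypothetical placeholder.
@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid: str, vcpus: int, memory_mb: int):
    return {"uuid": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}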
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.033679] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.038024] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b49dbde-1fe0-4800-8cf3-802bf6acb852 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.060397] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780461, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.065164] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.065164] env[68244]: value = "task-2780465" [ 880.065164] env[68244]: _type = "Task" [ 880.065164] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.073756] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780465, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.102272] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474307} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.104896] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ffa17045-fadf-47d7-9c3b-19d0d54de3fc.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.105305] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.105601] env[68244]: DEBUG nova.network.neutron [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.107417] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7cbbfde8-0f5b-4f10-b069-92ad78cd80dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.114627] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 880.114627] env[68244]: value = "task-2780466" [ 880.114627] env[68244]: _type = "Task" [ 880.114627] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.128664] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.281762] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.282307] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Start building networks asynchronously for instance. 
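The instance_claim held under the "compute_resources" lock above is made against the provider inventory reported a few entries earlier (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with reserved=512, DISK_GB total=400). As a quick sketch of the capacity arithmetic Placement normally applies, (total - reserved) * allocation_ratio, using exactly those logged numbers:

# Capacity sketch for the inventory logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}")
# VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400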
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 880.287330] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.405s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.292028] env[68244]: INFO nova.compute.claims [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.295329] env[68244]: INFO nova.compute.manager [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] instance snapshotting [ 880.296264] env[68244]: DEBUG nova.objects.instance [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.316199] env[68244]: DEBUG nova.network.neutron [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Updated VIF entry in instance network info cache for port 578b7668-06da-421b-9436-cfdb36e12b83. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.316536] env[68244]: DEBUG nova.network.neutron [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Updating instance_info_cache with network_info: [{"id": "578b7668-06da-421b-9436-cfdb36e12b83", "address": "fa:16:3e:74:90:ab", "network": {"id": "b44577f3-a481-45d1-9d8a-22ab788e1d4c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1053393449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e4f3ac0127744e4b6d8507c46bce1bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap578b7668-06", "ovs_interfaceid": "578b7668-06da-421b-9436-cfdb36e12b83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.537289] env[68244]: DEBUG oslo_vmware.api [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 
tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780461, 'name': PowerOnVM_Task, 'duration_secs': 0.722323} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.537585] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.537775] env[68244]: INFO nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Took 8.51 seconds to spawn the instance on the hypervisor. [ 880.539574] env[68244]: DEBUG nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.539574] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3a5c79-fd4f-4179-874f-fd022785aaca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.574801] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780465, 'name': CreateVM_Task, 'duration_secs': 0.357724} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.576742] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.576742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.576742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.576742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 880.576742] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e00d67-aee4-444a-bd2f-9d28eb508d54 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.581420] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 880.581420] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52650c33-7f61-d2fa-941e-28e3c7d4c38c" [ 880.581420] env[68244]: _type = "Task" [ 880.581420] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.590947] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52650c33-7f61-d2fa-941e-28e3c7d4c38c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.611062] env[68244]: INFO nova.compute.manager [-] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Took 1.58 seconds to deallocate network for instance. [ 880.625998] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07392} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.626397] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.627628] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842311d8-3ba9-4e14-8e2a-cc0e7df54500 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.652919] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ffa17045-fadf-47d7-9c3b-19d0d54de3fc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.653914] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37f80917-cb5c-45cb-a740-82384653450e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.674927] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 880.674927] env[68244]: value = "task-2780467" [ 880.674927] env[68244]: _type = "Task" [ 880.674927] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.684102] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780467, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.796165] env[68244]: DEBUG nova.compute.utils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 880.800029] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 880.800166] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 880.805202] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97686cd-c51e-4209-89d8-5ffdd6c54231 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.828543] env[68244]: DEBUG oslo_concurrency.lockutils [req-5684a56a-4675-43e5-bea9-99f98126e984 req-fcbd1190-b467-4d8e-8206-4051335b2e4f service nova] Releasing lock "refresh_cache-df4674a2-87de-4507-950a-5941fae93aab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.829656] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447f0f8c-175e-461b-bdc5-d09761285f1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.842563] env[68244]: DEBUG nova.policy [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcb360676a0b4898a283980e7839c68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15f251056bf64f719c7094479b569f0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 880.913567] env[68244]: DEBUG nova.compute.manager [req-862685e7-8140-435c-922f-30c628e25ed1 req-93edbec6-57d6-42fa-83ce-133b5c01315e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-vif-deleted-baebfb02-7f73-4cd8-a535-e4630409c5c8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 880.913766] env[68244]: DEBUG nova.compute.manager 
[req-862685e7-8140-435c-922f-30c628e25ed1 req-93edbec6-57d6-42fa-83ce-133b5c01315e service nova] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Received event network-vif-deleted-eb9064b8-063e-4e1c-9628-f254f4758e7e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 881.059197] env[68244]: INFO nova.compute.manager [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Took 42.01 seconds to build instance. [ 881.094964] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52650c33-7f61-d2fa-941e-28e3c7d4c38c, 'name': SearchDatastore_Task, 'duration_secs': 0.050561} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.095359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.095564] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.095811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.095958] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.096156] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.097272] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9138176b-1aa4-4b40-ab96-8f6343b8e758 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.105909] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 
tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.106147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.106916] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afd3f388-eaf0-4ad7-acfd-0575e93adc0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.113918] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 881.113918] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520599de-d15c-e10b-8447-dde08da5bf7f" [ 881.113918] env[68244]: _type = "Task" [ 881.113918] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.122557] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.122696] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520599de-d15c-e10b-8447-dde08da5bf7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.128797] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Successfully created port: 26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.185767] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780467, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.301203] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 881.340630] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 881.341107] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-400e93a1-730c-402f-b563-1229eec022d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.358149] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 881.358149] env[68244]: value = "task-2780468" [ 881.358149] env[68244]: _type = "Task" [ 881.358149] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.372176] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780468, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.561657] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8129918e-eb1a-49ad-95eb-51060920d474 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.537s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.624201] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520599de-d15c-e10b-8447-dde08da5bf7f, 'name': SearchDatastore_Task, 'duration_secs': 0.0085} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.625331] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b01f812-f259-4d89-bd81-c9d13f61ba71 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.632774] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 881.632774] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d19cf0-b7e6-6fe6-7a79-510f532830d6" [ 881.632774] env[68244]: _type = "Task" [ 881.632774] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.640474] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d19cf0-b7e6-6fe6-7a79-510f532830d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.685897] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780467, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.786164] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781e32f4-e008-4698-8d71-6101d2d0e942 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.793512] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581e5c37-abc9-4b51-9013-fb34cbab8e35 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.826923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c272788-138c-4684-83ce-09bd56c6b845 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.834671] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d6467f-6b00-4163-a8b1-e140005ef411 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.848466] env[68244]: DEBUG nova.compute.provider_tree [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.872022] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780468, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.064972] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.145442] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d19cf0-b7e6-6fe6-7a79-510f532830d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.145854] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.146503] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] df4674a2-87de-4507-950a-5941fae93aab/df4674a2-87de-4507-950a-5941fae93aab.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.146908] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c7eacd6-8ca3-4e32-8419-c304031cf6e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.154464] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 882.154464] env[68244]: value = "task-2780469" [ 882.154464] env[68244]: _type = "Task" [ 882.154464] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.162060] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.184872] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780467, 'name': ReconfigVM_Task, 'duration_secs': 1.102048} completed successfully. 
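The last several entries trace the vmwareapi image-cache flow for instance df4674a2: take the lock on the cached VMDK, probe for it with SearchDatastore_Task, create the devstack-image-cache_base directory if missing, then copy the cached disk into the instance directory (CopyVirtualDisk_Task) and extend it. The sketch below only mirrors that sequence; the datastore helper functions are hypothetical stubs, not the driver's real methods.

from oslo_concurrency import lockutils

IMAGE_CACHE = "[datastore2] devstack-image-cache_base"

# Hypothetical stubs standing in for SearchDatastore_Task, MakeDirectory,
# CopyVirtualDisk_Task and ExtendVirtualDisk_Task as seen in the log.
def datastore_path_exists(path: str) -> bool: ...
def make_directory(path: str) -> None: ...
def copy_virtual_disk(src: str, dst: str) -> None: ...
def extend_virtual_disk(path: str, size_kb: int) -> None: ...

def fetch_image_if_missing(image_id: str, instance_uuid: str, root_kb: int):
    cached = f"{IMAGE_CACHE}/{image_id}/{image_id}.vmdk"
    # Lock on the cached VMDK so concurrent builds don't race on the copy.
    with lockutils.lock(cached):
        if not datastore_path_exists(cached):
            make_directory(f"{IMAGE_CACHE}/{image_id}")
            # (downloading the image from Glance into the cache goes here)
    dst = f"[datastore2] {instance_uuid}/{instance_uuid}.vmdk"
    copy_virtual_disk(cached, dst)      # CopyVirtualDisk_Task
    extend_virtual_disk(dst, root_kb)   # ExtendVirtualDisk_Task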
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.189269] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Reconfigured VM instance instance-00000035 to attach disk [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ffa17045-fadf-47d7-9c3b-19d0d54de3fc.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.189576] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68244) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 882.190629] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-d4c1b223-3a53-4980-af84-bf5861457bf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.198023] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 882.198023] env[68244]: value = "task-2780470" [ 882.198023] env[68244]: _type = "Task" [ 882.198023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.209079] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780470, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.331531] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 882.352538] env[68244]: DEBUG nova.scheduler.client.report [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.363223] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.363530] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.363729] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.363867] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.364018] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.364209] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.364443] env[68244]: DEBUG nova.virt.hardware [None 
req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.364616] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.364929] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.365130] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.365343] env[68244]: DEBUG nova.virt.hardware [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.366390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1831df-fac7-4a7e-a989-729d0793ff0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.383429] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035110a2-12c5-4aa9-91fe-162bfc86e23f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.388153] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780468, 'name': CreateSnapshot_Task, 'duration_secs': 0.998922} completed successfully. 
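The nova.virt.hardware entries above show the topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, maxima defaulting to 65536 per dimension, and a single possible topology of sockets=1, cores=1, threads=1. The toy enumeration below reproduces that "possible topologies" step in spirit (it is not Nova's actual algorithm): every (sockets, cores, threads) factorization of the vCPU count that respects the per-dimension maxima.

from itertools import product

def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536):
    """Toy version of the 'possible topologies' step seen in the log:
    all (sockets, cores, threads) triples whose product equals vcpus
    and whose members stay within the maxima."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log for m1.nano
print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), ...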
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.388837] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 882.390117] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e72c0-2a76-41d4-a6db-e33f0cc5e87b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.595117] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.665765] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780469, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.708897] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780470, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.053517} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.709107] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68244) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 882.709913] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c247d5-d3ae-41fc-8ca9-4bbc94346398 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.734962] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ephemeral_0.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.735432] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27483d00-e895-44c7-9e10-2eea602274a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.754108] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 882.754108] env[68244]: value = "task-2780471" [ 882.754108] env[68244]: _type = "Task" [ 882.754108] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.762575] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.858959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.858959] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 882.863597] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.386s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.863900] env[68244]: DEBUG nova.objects.instance [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lazy-loading 'resources' on Instance uuid d81bdefa-9c23-413b-9670-bbb2139084f7 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.925788] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 882.927031] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e3c57e8b-a3f9-4b60-993c-d2d00f6e44b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.937642] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 882.937642] env[68244]: value = "task-2780472" [ 882.937642] env[68244]: _type = "Task" [ 882.937642] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.948040] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.001203] env[68244]: DEBUG nova.compute.manager [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Received event network-vif-plugged-26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.001433] env[68244]: DEBUG oslo_concurrency.lockutils [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] Acquiring lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.001699] env[68244]: DEBUG oslo_concurrency.lockutils [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.001843] env[68244]: DEBUG oslo_concurrency.lockutils [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.002545] env[68244]: DEBUG nova.compute.manager [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] No waiting events found dispatching network-vif-plugged-26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 883.002875] env[68244]: WARNING nova.compute.manager [req-da9630a7-31fc-4e4b-925b-0f744cbc0ce2 req-c97961c3-f307-454c-a4aa-7e534d385287 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Received unexpected event network-vif-plugged-26966576-ec16-40c4-b057-eb88b817f439 for instance with vm_state building and task_state spawning. [ 883.106981] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Successfully updated port: 26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.165813] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552331} completed successfully. 
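The network-vif-plugged handling above follows the compute manager's external-event pattern: take the "<uuid>-events" lock, pop any waiter registered for that event, and log "Received unexpected event ..." when nothing was waiting (as happened here because the instance is still building). A small sketch of that register/pop pattern using plain threading primitives; the class and method names are illustrative, not Nova's.

import threading
import warnings

class InstanceEvents:
    """Minimal register/pop pattern for external instance events."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event) -> threading.Event

    def prepare_for_event(self, instance_uuid: str, event: str) -> threading.Event:
        with self._lock:
            waiter = self._waiters[(instance_uuid, event)] = threading.Event()
            return waiter

    def pop_instance_event(self, instance_uuid: str, event: str):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event), None)
        if waiter is None:
            warnings.warn(f"Received unexpected event {event} "
                          f"for instance {instance_uuid}")
            return
        waiter.set()   # unblock whoever registered and is waiting

events = InstanceEvents()
# Nothing registered yet, so this takes the "unexpected event" branch:
events.pop_instance_event(
    "45ec526b-e9d8-4ea3-b0c8-af6da39b0158",
    "network-vif-plugged-26966576-ec16-40c4-b057-eb88b817f439")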
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.166207] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] df4674a2-87de-4507-950a-5941fae93aab/df4674a2-87de-4507-950a-5941fae93aab.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.166482] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.166831] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb81ba29-247d-41aa-af25-6da95b1dca66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.173871] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 883.173871] env[68244]: value = "task-2780473" [ 883.173871] env[68244]: _type = "Task" [ 883.173871] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.183488] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780473, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.265484] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780471, 'name': ReconfigVM_Task, 'duration_secs': 0.313071} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.265767] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Reconfigured VM instance instance-00000035 to attach disk [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc/ephemeral_0.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.266448] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25e75e89-5443-44a7-8cc6-ae11a5435664 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.273544] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 883.273544] env[68244]: value = "task-2780474" [ 883.273544] env[68244]: _type = "Task" [ 883.273544] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.281951] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780474, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.370200] env[68244]: DEBUG nova.compute.utils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.374944] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Allocating IP information in the background. 
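The "Using /dev/sd instead of None" line comes from device-name selection for a new block device mapping. A simplified stand-in that picks the next free /dev/sdX name from the names already in use (single-letter suffixes only; the helper name is mine, not Nova's):

```python
import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the next free block-device name, e.g. /dev/sdc after sda/sdb."""
    used_suffixes = {
        name[len(prefix):] for name in used if name.startswith(prefix)
    }
    for letter in string.ascii_lowercase:
        if letter not in used_suffixes:
            return prefix + letter
    raise ValueError("no free device names under " + prefix)

# Example: next_device_name(["/dev/sda", "/dev/sdb"]) -> "/dev/sdc"
```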
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.375534] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.418935] env[68244]: DEBUG nova.policy [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cddbad2016a4b98b5c05082a13f59f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c0473d09c04fb8a80d27a43c07bef4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.452964] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.610642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.611384] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.611384] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.688660] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122899} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.689052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.689806] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188eac83-950a-458a-9b9a-9a8fe646a542 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.717550] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] df4674a2-87de-4507-950a-5941fae93aab/df4674a2-87de-4507-950a-5941fae93aab.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.721446] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e92c7fe9-37de-45ee-bfb7-835345a8a99c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.743348] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 883.743348] env[68244]: value = "task-2780475" [ 883.743348] env[68244]: _type = "Task" [ 883.743348] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.753528] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.784177] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780474, 'name': Rename_Task, 'duration_secs': 0.168828} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.785082] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Successfully created port: 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.789394] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.789854] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3d2140b-9434-4483-881f-7e2d9b94202e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.798494] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 883.798494] env[68244]: value = "task-2780476" [ 883.798494] env[68244]: _type = "Task" [ 883.798494] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.806912] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.877215] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 883.943611] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615da78b-ae34-491d-afcd-63c89eccc881 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.957611] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9143044c-2692-4bc0-9a8c-5eca1c4a4476 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.961306] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.993789] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17803180-6cf1-46f4-9f92-757d46fabfba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.005029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec25756-601a-4431-99fb-ff8f0bae0dba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.018020] env[68244]: DEBUG nova.compute.provider_tree [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.156299] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.253856] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780475, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.299552] env[68244]: DEBUG nova.network.neutron [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Updating instance_info_cache with network_info: [{"id": "26966576-ec16-40c4-b057-eb88b817f439", "address": "fa:16:3e:3b:f0:28", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26966576-ec", "ovs_interfaceid": "26966576-ec16-40c4-b057-eb88b817f439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.310619] env[68244]: DEBUG oslo_vmware.api [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: 
{'id': task-2780476, 'name': PowerOnVM_Task, 'duration_secs': 0.502814} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.312206] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.312206] env[68244]: INFO nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Took 9.56 seconds to spawn the instance on the hypervisor. [ 884.312206] env[68244]: DEBUG nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 884.312715] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1574f9-5855-4c80-96f3-21be8b20d6d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.451527] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.522096] env[68244]: DEBUG nova.scheduler.client.report [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 884.756371] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780475, 'name': ReconfigVM_Task, 'duration_secs': 0.831236} completed successfully. 
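The "Inventory has not changed for provider ..." messages reflect a compare-before-update step: the resource tracker only pushes inventory to Placement when the reported records differ from what the provider tree already holds. A minimal sketch of that comparison over the field dicts shown in the log:

```python
def inventory_changed(current, desired):
    """Return True when the provider inventory actually needs an update.

    current/desired map resource classes (e.g. 'VCPU', 'MEMORY_MB') to the
    field dicts shown in the log (total, reserved, min_unit, max_unit,
    step_size, allocation_ratio). A simplified stand-in for the
    'Inventory has not changed' check."""
    if current.keys() != desired.keys():
        return True
    return any(current[rc] != desired[rc] for rc in desired)

# Example with the VCPU record from the log above:
vcpu = {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
        "step_size": 1, "allocation_ratio": 4.0}
assert inventory_changed({"VCPU": vcpu}, {"VCPU": dict(vcpu)}) is False
```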
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.756371] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Reconfigured VM instance instance-00000036 to attach disk [datastore2] df4674a2-87de-4507-950a-5941fae93aab/df4674a2-87de-4507-950a-5941fae93aab.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.756371] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2575f180-c5f7-4136-805c-8393f462bd6e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.761994] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 884.761994] env[68244]: value = "task-2780477" [ 884.761994] env[68244]: _type = "Task" [ 884.761994] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.769902] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780477, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.803744] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.804160] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Instance network_info: |[{"id": "26966576-ec16-40c4-b057-eb88b817f439", "address": "fa:16:3e:3b:f0:28", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26966576-ec", "ovs_interfaceid": "26966576-ec16-40c4-b057-eb88b817f439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 884.804972] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f0:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26966576-ec16-40c4-b057-eb88b817f439', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.812618] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.812852] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.813068] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85e830c9-f086-4fec-9c14-2de5e48e887b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.840377] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.840377] env[68244]: value = "task-2780478" [ 884.840377] env[68244]: _type = "Task" [ 884.840377] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.840377] env[68244]: INFO nova.compute.manager [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Took 44.17 seconds to build instance. [ 884.847676] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780478, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.891755] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Start spawning the instance on the hypervisor. 
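The "Instance VIF info" structure logged above is derived from one entry of the Neutron network_info list (the OpaqueNetwork reference comes from the port's nsx-logical-switch-id detail). A rough sketch of that mapping, assuming the network_info dict shape shown in the log; the real translation in nova.virt.vmwareapi covers more VIF types than this:

```python
def vif_to_vmware_info(vif, vif_model="vmxnet3"):
    """Rebuild the 'Instance VIF info' structure from one network_info entry.

    Illustrative only: keys are taken from the dicts visible in the log."""
    details = vif.get("details", {})
    network = vif.get("network", {})
    return {
        "network_name": network.get("bridge"),          # e.g. 'br-int'
        "mac_address": vif.get("address"),
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif.get("id"),
        "vif_model": vif_model,
    }
```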
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 884.923427] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 884.923750] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.923916] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 884.924236] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.924398] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 884.924540] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 884.924755] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 884.924931] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 884.925140] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 884.925310] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 884.925493] env[68244]: DEBUG nova.virt.hardware [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 884.927264] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4b6d4c-c49d-4bf1-af47-a049d5f4b541 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.939387] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3aa50df-b2d7-4fde-aacc-8f74f2dca5b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.961150] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. 
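The topology lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate sockets/cores/threads combinations whose product equals the vCPU count and that fit the flavor/image limits. A simplified version of that enumeration (Nova additionally sorts the result by preference, which is omitted here):

```python
from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals the
    vCPU count, within the given limits."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores) != 0:
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

# possible_cpu_topologies(1) -> [(1, 1, 1)], matching the single topology above.
```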
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.004082] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.004320] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.027488] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.031295] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.020s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.032780] env[68244]: INFO nova.compute.claims [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.040019] env[68244]: DEBUG nova.compute.manager [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Received event network-changed-26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 885.040019] env[68244]: DEBUG nova.compute.manager [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Refreshing instance network info cache due to event network-changed-26966576-ec16-40c4-b057-eb88b817f439. 
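The 'Lock "compute_resources" acquired by ... waited 32.020s' lines are produced by oslo.concurrency's lock decorator wrapping the resource tracker's claim/update paths. An illustrative use of the same primitive; update_usage and its arguments here are stand-ins, not the real ResourceTracker method:

```python
from oslo_concurrency import lockutils

# The decorator serializes callers on a named lock and emits the same kind of
# "Lock ... acquired/released ... held N.NNNs" DEBUG lines seen above.
@lockutils.synchronized("compute_resources")
def update_usage(tracker_state, instance_uuid, delta):
    """Serialize updates to shared resource-tracker state (toy example)."""
    usage = tracker_state.setdefault(instance_uuid, 0)
    tracker_state[instance_uuid] = usage + delta
    return tracker_state[instance_uuid]
```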
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 885.040019] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Acquiring lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.040019] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Acquired lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.040019] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Refreshing network info cache for port 26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.046574] env[68244]: INFO nova.scheduler.client.report [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Deleted allocations for instance d81bdefa-9c23-413b-9670-bbb2139084f7 [ 885.278908] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780477, 'name': Rename_Task, 'duration_secs': 0.142771} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.279263] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.279428] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8532a237-5edb-4ed3-b330-05a1c34c9e1e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.285968] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 885.285968] env[68244]: value = "task-2780479" [ 885.285968] env[68244]: _type = "Task" [ 885.285968] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.294769] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780479, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.335413] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Successfully updated port: 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.341894] env[68244]: DEBUG nova.compute.manager [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Received event network-changed-2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 885.342101] env[68244]: DEBUG nova.compute.manager [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Refreshing instance network info cache due to event network-changed-2a529edd-e384-4bf7-8ab6-a868cc9e2788. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 885.342316] env[68244]: DEBUG oslo_concurrency.lockutils [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] Acquiring lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.342454] env[68244]: DEBUG oslo_concurrency.lockutils [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] Acquired lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.342609] env[68244]: DEBUG nova.network.neutron [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Refreshing network info cache for port 2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.347561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aa0ddc8b-835c-4049-a985-f16224e9960e tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.466s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.355204] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780478, 'name': CreateVM_Task, 'duration_secs': 0.333411} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.355255] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.355948] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.356420] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.356551] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 885.357013] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7346c6e8-bdd7-408c-918e-4de644281a74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.362533] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 885.362533] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52451bc9-92e0-e060-5cbe-54883cfa3507" [ 885.362533] env[68244]: _type = "Task" [ 885.362533] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.371830] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52451bc9-92e0-e060-5cbe-54883cfa3507, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.453555] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.555839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fd12b592-a2f9-446d-9c55-220ae8cbed3f tempest-ServersTestManualDisk-1540218977 tempest-ServersTestManualDisk-1540218977-project-member] Lock "d81bdefa-9c23-413b-9670-bbb2139084f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.872s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.796329] env[68244]: DEBUG oslo_vmware.api [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780479, 'name': PowerOnVM_Task, 'duration_secs': 0.455684} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.796614] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.796825] env[68244]: INFO nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Took 8.35 seconds to spawn the instance on the hypervisor. [ 885.797017] env[68244]: DEBUG nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 885.798190] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653750f4-9697-470d-9636-4cfb6bff66e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.837690] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.837845] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.837999] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.855727] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e 
tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 885.875573] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52451bc9-92e0-e060-5cbe-54883cfa3507, 'name': SearchDatastore_Task, 'duration_secs': 0.02027} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.876197] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.876464] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.876713] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.876863] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.877068] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.877335] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e0f30c5-9355-4a98-bae0-44263d3f9f65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.886068] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Updated VIF entry in instance network info cache for port 26966576-ec16-40c4-b057-eb88b817f439. 
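The image-cache handling above serializes on the cached VMDK path ([datastore2] devstack-image-cache_base/<image>/<image>.vmdk) and only populates the cache when the image is missing, after which the disk is copied out for the instance. A toy, in-memory version of that fetch-if-missing flow; fetch_image and copy_to_instance are hypothetical callables, not Nova APIs:

```python
import threading

_guard = threading.Lock()
_cache_locks = {}   # one lock per cached image path

def _lock_for(key):
    with _guard:
        return _cache_locks.setdefault(key, threading.Lock())

def ensure_cached_image(cache, image_id, fetch_image, copy_to_instance, dest):
    """Serialize on the per-image cache key, fetch into the cache only when
    the image is absent, then copy the cached disk out for the instance."""
    cache_key = f"devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    with _lock_for(cache_key):
        if cache_key not in cache:
            cache[cache_key] = fetch_image(image_id)   # e.g. download from Glance
    copy_to_instance(cache[cache_key], dest)           # e.g. CopyVirtualDisk_Task
```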
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.886729] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Updating instance_info_cache with network_info: [{"id": "26966576-ec16-40c4-b057-eb88b817f439", "address": "fa:16:3e:3b:f0:28", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26966576-ec", "ovs_interfaceid": "26966576-ec16-40c4-b057-eb88b817f439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.890221] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.890427] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.891710] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58eca26e-5fd1-44ed-b0a6-5aa67ffaaaa3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.897215] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 885.897215] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a067ce-41cc-9f3a-6210-7db87f061325" [ 885.897215] env[68244]: _type = "Task" [ 885.897215] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.907798] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a067ce-41cc-9f3a-6210-7db87f061325, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.955664] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.205484] env[68244]: DEBUG nova.network.neutron [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updated VIF entry in instance network info cache for port 2a529edd-e384-4bf7-8ab6-a868cc9e2788. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.205847] env[68244]: DEBUG nova.network.neutron [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updating instance_info_cache with network_info: [{"id": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "address": "fa:16:3e:e3:b1:14", "network": {"id": "6773bdcd-2951-4796-bc90-1112ce35feaa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1633967643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eab15ba4f32a45d1832ce9d831d62f34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a529edd-e3", "ovs_interfaceid": "2a529edd-e384-4bf7-8ab6-a868cc9e2788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.330092] env[68244]: INFO nova.compute.manager [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Took 42.87 seconds to build instance. [ 886.380820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.385311] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.392157] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Releasing lock "refresh_cache-45ec526b-e9d8-4ea3-b0c8-af6da39b0158" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.392950] env[68244]: DEBUG nova.compute.manager [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 886.392950] env[68244]: DEBUG nova.compute.manager [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing instance network info cache due to event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 886.392950] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.392950] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.393168] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.418994] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a067ce-41cc-9f3a-6210-7db87f061325, 'name': SearchDatastore_Task, 'duration_secs': 0.033159} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.419617] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ae047ea-416c-4428-9182-8535ac3e4a8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.428119] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 886.428119] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52067c81-02dc-961d-5aae-75fc22a0407d" [ 886.428119] env[68244]: _type = "Task" [ 886.428119] env[68244]: } to complete. 
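Annotation: the SearchDatastore_Task and CopyVirtualDisk_Task records above all follow the same oslo.vmware pattern — a SOAP method ending in _Task is invoked and the returned task object is polled (the recurring "progress is N%" lines) until it succeeds or raises. Below is a minimal sketch of that pattern, not Nova's own helper code; the endpoint, credentials, and datastore paths are placeholders.

    from oslo_vmware import api as vmware_api

    # Placeholder vCenter endpoint and credentials; task_poll_interval controls
    # how often the "... progress is N%" polling seen above happens.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call; *_Task methods return a task object.
    # (A real CopyVirtualDisk_Task call also passes sourceDatacenter /
    # destDatacenter references, omitted here for brevity.)
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore2] devstack-image-cache_base/<image>.vmdk',
        destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task() blocks, logging progress, until the task completes or
    # raises on error.
    session.wait_for_task(task)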
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.437014] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52067c81-02dc-961d-5aae-75fc22a0407d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.456553] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.561019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c6f753-30c8-455d-8523-cc96ad8c561f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.570210] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d088015-e19e-465e-afa0-c60244015a82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.605340] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4689aaf9-0e5c-4b97-bbfa-ec0aa331609a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.613043] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5282af-039a-4da8-8ce5-2f2d93230144 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.626994] env[68244]: DEBUG nova.compute.provider_tree [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.629154] env[68244]: DEBUG nova.network.neutron [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": 
"nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.710099] env[68244]: DEBUG oslo_concurrency.lockutils [req-3db498b2-c770-4309-9776-e31a89117630 req-45b44123-8b4f-42b8-b473-eb654c185709 service nova] Releasing lock "refresh_cache-ffa17045-fadf-47d7-9c3b-19d0d54de3fc" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.833240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a0ad322-f334-4139-9328-cf0d6cce7b96 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.550s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.913983] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "df4674a2-87de-4507-950a-5941fae93aab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.913983] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.914177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "df4674a2-87de-4507-950a-5941fae93aab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.914361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.914528] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.916772] env[68244]: INFO nova.compute.manager [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Terminating instance [ 886.942104] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52067c81-02dc-961d-5aae-75fc22a0407d, 'name': SearchDatastore_Task, 'duration_secs': 0.009979} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.942199] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.942415] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 45ec526b-e9d8-4ea3-b0c8-af6da39b0158/45ec526b-e9d8-4ea3-b0c8-af6da39b0158.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.942690] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86e5f87d-54fe-4204-8e2f-858c5098815d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.952714] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 886.952714] env[68244]: value = "task-2780480" [ 886.952714] env[68244]: _type = "Task" [ 886.952714] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.959916] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.965923] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780480, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.078200] env[68244]: DEBUG nova.compute.manager [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-vif-plugged-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 887.078422] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.078682] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.079505] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.079919] env[68244]: DEBUG nova.compute.manager [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] No waiting events found dispatching network-vif-plugged-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 887.080092] env[68244]: WARNING nova.compute.manager [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received unexpected event network-vif-plugged-7cc08f9f-ecf6-45df-a147-29489ed20ade for instance with vm_state building and task_state spawning. [ 887.080171] env[68244]: DEBUG nova.compute.manager [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 887.080364] env[68244]: DEBUG nova.compute.manager [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing instance network info cache due to event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade. 
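Annotation: the "Updating instance_info_cache with network_info" records above carry each instance's network information as a JSON list of VIFs. A small stand-alone sketch of walking that structure to list each port's fixed and floating addresses; the keys are taken from the payloads above, with values abbreviated.

    import json

    # Structure copied from the instance_info_cache payloads above (abbreviated).
    network_info = json.loads("""
    [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade",
      "address": "fa:16:3e:ba:45:74",
      "devname": "tap7cc08f9f-ec",
      "vnic_type": "normal",
      "active": true,
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.11",
                                        "type": "fixed",
                                        "floating_ips": []}]}]}}]
    """)

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], vif["address"], ip["address"], floating or "-")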
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 887.080579] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.131772] env[68244]: DEBUG nova.scheduler.client.report [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.136168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.136491] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Instance network_info: |[{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 887.136792] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.136994] env[68244]: DEBUG nova.network.neutron [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 
req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.139368] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:45:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cc08f9f-ecf6-45df-a147-29489ed20ade', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.148236] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 887.151817] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.152979] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fe4b428-7eac-4886-919b-8ef42aa71dae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.177235] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.177235] env[68244]: value = "task-2780481" [ 887.177235] env[68244]: _type = "Task" [ 887.177235] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.187083] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780481, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.371627] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updated VIF entry in instance network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.371917] env[68244]: DEBUG nova.network.neutron [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.421361] env[68244]: DEBUG nova.compute.manager [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 887.421614] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 887.422765] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d16438-d5c2-4cf9-9819-9f9ac24f6d2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.431903] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.432350] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02ae1758-8be3-4b2b-929d-03f13439313e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.440602] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 887.440602] env[68244]: value = "task-2780482" [ 887.440602] env[68244]: _type = "Task" [ 887.440602] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.461025] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.471080] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.477970] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506228} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.477970] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 45ec526b-e9d8-4ea3-b0c8-af6da39b0158/45ec526b-e9d8-4ea3-b0c8-af6da39b0158.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.477970] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.478343] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-226104fc-150e-46ca-b1ef-17cd2a991460 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.486407] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 887.486407] env[68244]: value = "task-2780483" [ 887.486407] env[68244]: _type = "Task" [ 887.486407] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.497871] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780483, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.650596] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.652407] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 887.653786] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.866s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.654273] env[68244]: DEBUG nova.objects.instance [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lazy-loading 'resources' on Instance uuid 100ec1f9-6776-4832-a4c2-e9a4def0d350 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.685813] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780481, 'name': CreateVM_Task, 'duration_secs': 0.460737} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.685986] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 887.686660] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.686901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.687151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 887.687441] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f9dec0-ad71-47c8-9147-a9e8f19a9c9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.692764] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 887.692764] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d83db-a130-ef7f-b16c-333a0942a3ba" [ 887.692764] env[68244]: _type = "Task" [ 887.692764] env[68244]: } to complete. 
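Annotation: the lock bookkeeping above ("Acquiring lock ... by ...", "acquired ... waited", "released ... held") is emitted by oslo.concurrency's lockutils, which Nova uses for named locks such as "compute_resources" and the per-image cache paths. A minimal sketch of the two usage forms, with illustrative lock names and empty bodies:

    from oslo_concurrency import lockutils

    # Decorator form: every call to claim() serialises on the named lock, and
    # lockutils logs how long the caller waited for and held it.
    @lockutils.synchronized('compute_resources')
    def claim():
        pass  # resource-tracker style work would go here

    # Context-manager form, as used for the image-cache entries above:
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-uuid>'):
        pass  # fetch or reuse the cached VMDK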
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.702943] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d83db-a130-ef7f-b16c-333a0942a3ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.859927] env[68244]: DEBUG nova.network.neutron [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updated VIF entry in instance network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.860331] env[68244]: DEBUG nova.network.neutron [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.874438] env[68244]: DEBUG oslo_concurrency.lockutils [req-70bc715a-5605-4191-844a-fe6d45a07400 req-a0da3eca-bf58-4afe-aba3-6fcc0c165016 service nova] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.950948] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780482, 'name': PowerOffVM_Task, 'duration_secs': 0.207} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.954267] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.954387] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 887.954580] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a96184a3-81b1-4d90-ab6f-fa1bb565a2d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.960917] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.996790] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780483, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071595} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.997069] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.997929] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd72c43-2f9e-4fbd-aaf6-7efdc55661b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.022016] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 45ec526b-e9d8-4ea3-b0c8-af6da39b0158/45ec526b-e9d8-4ea3-b0c8-af6da39b0158.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.023457] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c7c7bcc-1e8c-4479-b4d0-5f8aed694d61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.040274] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Unregistered 
the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.040274] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.040274] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Deleting the datastore file [datastore2] df4674a2-87de-4507-950a-5941fae93aab {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.040274] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c065aac-e650-44e9-bbaa-f538f2a0b9d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.046690] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for the task: (returnval){ [ 888.046690] env[68244]: value = "task-2780486" [ 888.046690] env[68244]: _type = "Task" [ 888.046690] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.048056] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 888.048056] env[68244]: value = "task-2780485" [ 888.048056] env[68244]: _type = "Task" [ 888.048056] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.058400] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.061479] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.159479] env[68244]: DEBUG nova.compute.utils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 888.163738] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 888.163965] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.213322] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d83db-a130-ef7f-b16c-333a0942a3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009965} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.215023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.215023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.215023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.215023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.215362] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.215625] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc2f3078-dc9e-470c-ad96-56e733355f76 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.224156] env[68244]: DEBUG nova.policy [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '4e2b78ca269843a0a5541e44727d807b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf55a7bfa5948d1837855650c1c960b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 888.240882] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.240882] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.241646] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab6d8e31-a297-4203-816c-c743bc83afb0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.251536] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 888.251536] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529d5f10-8f9b-8ef0-d479-5ce8d7b400a6" [ 888.251536] env[68244]: _type = "Task" [ 888.251536] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.259706] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529d5f10-8f9b-8ef0-d479-5ce8d7b400a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.363637] env[68244]: DEBUG oslo_concurrency.lockutils [req-a35134d5-74dc-4805-8a91-6da8f3abeee0 req-0467b068-d753-4ae8-906b-79ce4aa22fc3 service nova] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.463103] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780472, 'name': CloneVM_Task, 'duration_secs': 5.300683} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.463103] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created linked-clone VM from snapshot [ 888.463492] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f701a3d-7662-4377-8141-9450ab671dd2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.472663] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploading image 965f2a77-66bb-4c10-984c-f5a53d6251b6 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 888.505873] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 888.505873] env[68244]: value = "vm-559029" [ 888.505873] env[68244]: _type = "VirtualMachine" [ 888.505873] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 888.509236] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bcc63d41-dbd0-4783-b11f-3f9f7a0846ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.511776] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Successfully created port: 97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.520306] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease: (returnval){ [ 888.520306] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fb62-a21d-1ea7-406d-5ae28a328bc9" [ 888.520306] env[68244]: _type = "HttpNfcLease" [ 888.520306] env[68244]: } obtained for exporting VM: (result){ [ 888.520306] env[68244]: value = "vm-559029" [ 888.520306] env[68244]: _type = "VirtualMachine" [ 888.520306] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 888.520902] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the lease: (returnval){ [ 888.520902] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fb62-a21d-1ea7-406d-5ae28a328bc9" [ 888.520902] env[68244]: _type = "HttpNfcLease" [ 888.520902] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 888.533062] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 888.533062] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fb62-a21d-1ea7-406d-5ae28a328bc9" [ 888.533062] env[68244]: _type = "HttpNfcLease" [ 888.533062] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 888.533062] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 888.533062] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209fb62-a21d-1ea7-406d-5ae28a328bc9" [ 888.533062] env[68244]: _type = "HttpNfcLease" [ 888.533062] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 888.533641] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6df4d7-6ef6-4687-9102-c963a1a83440 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.541701] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 888.542982] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 888.631300] env[68244]: DEBUG oslo_vmware.api [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Task: {'id': task-2780486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238578} completed successfully. 
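Annotation: the image-upload records above show the linked-clone export path — an HttpNfcLease is created for the cloned VM, the lease info yields a VMDK URL on an ESX host, and oslo_vmware.rw_handles then streams that URL while periodically invoking HttpNfcLeaseProgress to keep the lease alive. As a rough stand-in for the read side only (not the real rw_handles class), assuming the URL is already known:

    import requests  # stand-in transport; oslo.vmware uses its own read handle

    vmdk_url = "https://esx.example.test/nfc/<lease-id>/disk-0.vmdk"  # placeholder

    # Stream the exported disk in chunks. The real handle also re-posts
    # HttpNfcLeaseProgress so vCenter does not time the lease out mid-transfer.
    with requests.get(vmdk_url, stream=True, verify=False) as resp:
        resp.raise_for_status()
        transferred = 0
        for chunk in resp.iter_content(chunk_size=1024 * 1024):
            transferred += len(chunk)
    print(transferred, "bytes read")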
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.634114] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 888.634615] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 888.634615] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 888.634735] env[68244]: INFO nova.compute.manager [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] [instance: df4674a2-87de-4507-950a-5941fae93aab] Took 1.21 seconds to destroy the instance on the hypervisor. [ 888.636219] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.636219] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780485, 'name': ReconfigVM_Task, 'duration_secs': 0.313593} completed successfully. 
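Annotation: the "Waiting for function ... to return" records above come from oslo.service's eventlet loopingcall backend, which Nova uses to retry or poll helpers such as _deallocate_network_with_retries and vm_util.create_vm. As an illustration only (not Nova's wrapper itself), a sketch using loopingcall.RetryDecorator with a hypothetical flaky() callable and exception type:

    from oslo_service import loopingcall


    class TransientGlitch(Exception):
        """Hypothetical failure type that should be retried."""


    # Retry up to 3 times, sleeping an increasing amount (capped at 10s)
    # between tries; other exceptions, or exhausting the retries, propagate.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientGlitch,))
    def flaky():
        # e.g. deallocate networking, talk to vCenter, ...
        return "done"


    print(flaky())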
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.636219] env[68244]: DEBUG nova.compute.manager [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 888.636219] env[68244]: DEBUG nova.network.neutron [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.637284] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 45ec526b-e9d8-4ea3-b0c8-af6da39b0158/45ec526b-e9d8-4ea3-b0c8-af6da39b0158.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.637981] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f05ea05-64b5-42ea-89d9-3bd3bb73cab9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.647221] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 888.647221] env[68244]: value = "task-2780488" [ 888.647221] env[68244]: _type = "Task" [ 888.647221] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.659149] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-56be216a-deb3-461c-9688-c7d2057c571a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.659576] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780488, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.666841] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 888.734290] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc94cdb-0528-49a5-a433-da635a9eaabe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.742390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae548e1-12fd-4d28-ba01-44f9d5a733d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.779705] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45595526-70ff-4a45-b145-b4b65a636bd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.787426] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529d5f10-8f9b-8ef0-d479-5ce8d7b400a6, 'name': SearchDatastore_Task, 'duration_secs': 0.019202} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.791786] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca4d8a69-083c-4d09-a21d-19596c9df025 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.792905] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b34e51-4d59-4b9b-9e15-5b02ffda93fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.799616] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 888.799616] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e5747e-0d66-6af3-50dc-71c627b6e689" [ 888.799616] env[68244]: _type = "Task" [ 888.799616] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.808179] env[68244]: DEBUG nova.compute.provider_tree [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.817405] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e5747e-0d66-6af3-50dc-71c627b6e689, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.162054] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780488, 'name': Rename_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.264581] env[68244]: DEBUG nova.compute.manager [req-6f951680-332d-41b8-b86b-be1e7341c5ee req-67a4ba86-c8c7-4f18-8f93-92cab39e90b4 service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Received event network-vif-deleted-578b7668-06da-421b-9436-cfdb36e12b83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.266106] env[68244]: INFO nova.compute.manager [req-6f951680-332d-41b8-b86b-be1e7341c5ee req-67a4ba86-c8c7-4f18-8f93-92cab39e90b4 service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Neutron deleted interface 578b7668-06da-421b-9436-cfdb36e12b83; detaching it from the instance and deleting it from the info cache [ 889.266106] env[68244]: DEBUG nova.network.neutron [req-6f951680-332d-41b8-b86b-be1e7341c5ee req-67a4ba86-c8c7-4f18-8f93-92cab39e90b4 service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.314078] env[68244]: DEBUG nova.scheduler.client.report [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.325708] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e5747e-0d66-6af3-50dc-71c627b6e689, 'name': SearchDatastore_Task, 'duration_secs': 0.059318} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.325928] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.326214] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/cedcff81-0010-4fa6-95bf-72a4dcac5427.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.326495] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c27915b7-6c76-4530-b91a-d1ad4dabbd42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.335215] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 889.335215] env[68244]: value = "task-2780489" [ 889.335215] env[68244]: _type = "Task" [ 889.335215] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.345767] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780489, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.661634] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780488, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.681938] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 889.703781] env[68244]: DEBUG nova.network.neutron [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.712648] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=<?>,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:18:49Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.713146] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.713588] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.713899] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.714569] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.714887] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 889.715285] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.715895] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 
tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 889.716388] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.716710] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.718056] env[68244]: DEBUG nova.virt.hardware [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.718056] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210f2ed5-dd51-4a4d-b833-8d88af70653e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.733241] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50858f3b-4399-489f-a0ab-a2dd928d078d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.769378] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38af6f0d-cbaa-441e-8429-2bb9e7145d8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.786776] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b86cd6f-9296-4966-9721-3ec79b5c8c48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.834584] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.180s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.839651] env[68244]: DEBUG nova.compute.manager [req-6f951680-332d-41b8-b86b-be1e7341c5ee req-67a4ba86-c8c7-4f18-8f93-92cab39e90b4 service nova] [instance: df4674a2-87de-4507-950a-5941fae93aab] Detach interface failed, port_id=578b7668-06da-421b-9436-cfdb36e12b83, reason: Instance df4674a2-87de-4507-950a-5941fae93aab could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 889.841479] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.058s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.844238] env[68244]: INFO nova.compute.claims [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.863824] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780489, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.883406] env[68244]: INFO nova.scheduler.client.report [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted allocations for instance 100ec1f9-6776-4832-a4c2-e9a4def0d350 [ 890.161487] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780488, 'name': Rename_Task, 'duration_secs': 1.149148} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.161955] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.162235] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d3665cf-3099-4462-88c7-ffd042fd40ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.170732] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 890.170732] env[68244]: value = "task-2780490" [ 890.170732] env[68244]: _type = "Task" [ 890.170732] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.183658] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.211553] env[68244]: INFO nova.compute.manager [-] [instance: df4674a2-87de-4507-950a-5941fae93aab] Took 1.58 seconds to deallocate network for instance. 
[ 890.364121] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780489, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718798} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.364121] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/cedcff81-0010-4fa6-95bf-72a4dcac5427.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.364483] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.364778] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de9443b1-63e5-4f9b-b065-33119b9e622d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.373956] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 890.373956] env[68244]: value = "task-2780491" [ 890.373956] env[68244]: _type = "Task" [ 890.373956] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.382715] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780491, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.391566] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4f18015c-027c-4d37-b0d5-338a1d1599c0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "100ec1f9-6776-4832-a4c2-e9a4def0d350" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.103s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.516921] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Successfully updated port: 97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.686844] env[68244]: DEBUG oslo_vmware.api [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780490, 'name': PowerOnVM_Task, 'duration_secs': 0.484072} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.686844] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.686844] env[68244]: INFO nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Took 8.35 seconds to spawn the instance on the hypervisor. [ 890.686844] env[68244]: DEBUG nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.686844] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70662f8c-5c4b-4830-b27a-40026f19a22c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.723057] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.889078] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780491, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065633} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.889078] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.889233] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f489c75-ef8b-49bf-b0de-830fb129900f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.915927] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/cedcff81-0010-4fa6-95bf-72a4dcac5427.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.919317] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ab85852-3b96-4840-942f-be95dd9ec324 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.943320] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 890.943320] env[68244]: value = "task-2780492" [ 890.943320] env[68244]: _type = "Task" [ 890.943320] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.950798] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780492, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.021754] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.021998] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.022103] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.207868] env[68244]: INFO nova.compute.manager [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Took 45.09 seconds to build instance. [ 891.381857] env[68244]: DEBUG nova.compute.manager [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Received event network-vif-plugged-97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 891.382092] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.382270] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.384203] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.384203] env[68244]: DEBUG nova.compute.manager [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] No waiting events found dispatching network-vif-plugged-97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.384203] env[68244]: WARNING nova.compute.manager 
[req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Received unexpected event network-vif-plugged-97fdf60d-e090-463d-ae82-229571208a74 for instance with vm_state building and task_state spawning. [ 891.384203] env[68244]: DEBUG nova.compute.manager [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Received event network-changed-97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 891.384203] env[68244]: DEBUG nova.compute.manager [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Refreshing instance network info cache due to event network-changed-97fdf60d-e090-463d-ae82-229571208a74. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 891.384203] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Acquiring lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.444182] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd9ee17-2cc8-4456-b4bf-0902eee83d28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.466873] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd5b017-9b07-4c73-a3aa-da7b86a7155c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.472276] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780492, 'name': ReconfigVM_Task, 'duration_secs': 0.476367} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.472658] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfigured VM instance instance-00000038 to attach disk [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/cedcff81-0010-4fa6-95bf-72a4dcac5427.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.474060] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-459d3bf1-e66c-47d3-b5fc-85cf3fe427d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.503938] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfa9c0f-320a-49c4-a7de-2d359e5f09a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.508572] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 891.508572] env[68244]: value = "task-2780493" [ 891.508572] env[68244]: _type = "Task" [ 891.508572] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.516205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01afcc9e-9117-49f0-a641-9163c65f3be8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.523984] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780493, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.538310] env[68244]: DEBUG nova.compute.provider_tree [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.583023] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.711205] env[68244]: DEBUG oslo_concurrency.lockutils [None req-196ee1e6-64fe-4d37-9db6-ffb3f8448e33 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 93.619s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.851540] env[68244]: DEBUG nova.network.neutron [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.026027] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780493, 'name': Rename_Task, 'duration_secs': 0.340737} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.026417] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.026722] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88aeb0ec-5f90-49ba-84f3-ba65f4d6ead6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.033930] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 892.033930] env[68244]: value = "task-2780494" [ 892.033930] env[68244]: _type = "Task" [ 892.033930] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.042708] env[68244]: DEBUG nova.scheduler.client.report [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.046047] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780494, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.354391] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.354747] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Instance network_info: |[{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.355141] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Acquired lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.355342] env[68244]: DEBUG nova.network.neutron [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Refreshing network info cache for port 97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.356688] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:67:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '97fdf60d-e090-463d-ae82-229571208a74', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.368377] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.371693] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.372248] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c590077-3ef3-43d8-81e9-19323d632fe1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.397316] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.397316] env[68244]: value = "task-2780495" [ 892.397316] env[68244]: _type = "Task" [ 892.397316] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.413022] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780495, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.548969] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.549247] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.550857] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.551304] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 892.559893] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.555s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.560903] env[68244]: INFO nova.compute.claims [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.564481] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780494, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.642064] env[68244]: DEBUG nova.network.neutron [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updated VIF entry in instance network info cache for port 97fdf60d-e090-463d-ae82-229571208a74. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.642453] env[68244]: DEBUG nova.network.neutron [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.907338] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780495, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.987924] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.988172] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.044562] env[68244]: DEBUG oslo_vmware.api [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780494, 'name': PowerOnVM_Task, 'duration_secs': 0.868052} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.044831] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.045182] env[68244]: INFO nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Took 8.15 seconds to spawn the instance on the hypervisor. [ 893.045417] env[68244]: DEBUG nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.046513] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72de9e73-2588-4c13-9f9e-b376a0a6285a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.056536] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 893.066165] env[68244]: DEBUG nova.compute.utils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.067827] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 893.067827] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.139135] env[68244]: DEBUG nova.policy [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8587147149b84a34bfbbd01e2bb637b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bf57141eff643a8b03f3b0576678ec1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 893.144989] env[68244]: DEBUG oslo_concurrency.lockutils [req-89c5bfe8-2c93-454a-a82d-a312423f880f req-38664578-49f4-4092-ad45-f395f0ce40a9 service nova] Releasing lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.411240] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780495, 'name': CreateVM_Task, 'duration_secs': 0.549825} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.411959] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.416025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.416025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.416025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 893.416025] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a9067ac-4278-4639-ae8a-88df8b47ab5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.418676] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 893.418676] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527b7146-59f0-246f-8957-329e2bf314fe" [ 893.418676] env[68244]: _type = "Task" [ 893.418676] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.427061] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527b7146-59f0-246f-8957-329e2bf314fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.437626] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Successfully created port: c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.491059] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 893.570447] env[68244]: INFO nova.compute.manager [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Took 42.70 seconds to build instance. [ 893.576463] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 893.609884] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.929782] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527b7146-59f0-246f-8957-329e2bf314fe, 'name': SearchDatastore_Task, 'duration_secs': 0.013614} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.932507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.932759] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.932995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.933162] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.933344] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.933810] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95c2b9b5-8521-486c-a3ef-eb6eb91beff2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.946126] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.946330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.947078] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5339ffc4-dcd9-4abf-990f-91f6404655d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.952775] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 893.952775] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b7609-733c-89ec-0d9a-66f5432d42e8" [ 893.952775] env[68244]: _type = "Task" [ 893.952775] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.963174] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b7609-733c-89ec-0d9a-66f5432d42e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.018836] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.030227] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aea399-600d-45d4-b76f-be8076ea9357 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.038362] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7004d5f2-b0a5-4fac-bb74-21e746342a4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.070443] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2911c7-b4a7-4246-bbd1-dd6c252be75a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.073276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-120a9835-4802-4197-b8cf-8e2bbf6859f6 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.174s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.078703] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58aa6328-f3c3-49b4-9613-1a8c23f3c07c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.096384] env[68244]: DEBUG nova.compute.provider_tree [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.464288] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b7609-733c-89ec-0d9a-66f5432d42e8, 'name': SearchDatastore_Task, 'duration_secs': 0.029148} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.465109] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78b59788-ced9-487f-b8d8-0fc091863a2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.470618] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 894.470618] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e0c958-50b3-15af-241f-6795ceed0100" [ 894.470618] env[68244]: _type = "Task" [ 894.470618] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.478909] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e0c958-50b3-15af-241f-6795ceed0100, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.600340] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 894.603591] env[68244]: DEBUG nova.scheduler.client.report [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.640517] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:23:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='62be5865-e959-4fad-8733-6e5a5e5fb9d8',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-283340065',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.640765] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.640897] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.642305] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.642516] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.642677] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.642898] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.643067] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 894.643236] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.643401] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.643574] env[68244]: DEBUG nova.virt.hardware [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.644772] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9b1eb8-a72a-4f64-9526-317278555fa4 {{(pid=68244) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.654109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06894b5-ab8f-4150-84f6-31230e8f5295 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.936932] env[68244]: DEBUG nova.compute.manager [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Received event network-vif-plugged-c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 894.937222] env[68244]: DEBUG oslo_concurrency.lockutils [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] Acquiring lock "d74a0d56-8656-429c-a703-fca87e07798f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.937449] env[68244]: DEBUG oslo_concurrency.lockutils [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.937613] env[68244]: DEBUG oslo_concurrency.lockutils [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.937985] env[68244]: DEBUG nova.compute.manager [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] No waiting events found dispatching network-vif-plugged-c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.938194] env[68244]: WARNING nova.compute.manager [req-2bf07b4f-91e9-419f-8389-324d2b7fa111 req-1d6f8b4b-b635-43d2-a4c6-b386d09f4b5c service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Received unexpected event network-vif-plugged-c9ac021e-cd9a-4092-8f49-fd149000b0aa for instance with vm_state building and task_state spawning. [ 894.981740] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e0c958-50b3-15af-241f-6795ceed0100, 'name': SearchDatastore_Task, 'duration_secs': 0.012428} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.982649] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Successfully updated port: c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.983818] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.984101] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.984577] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3264719e-c2a0-4cab-ab5a-4619dbba43cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.991697] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 894.991697] env[68244]: value = "task-2780496" [ 894.991697] env[68244]: _type = "Task" [ 894.991697] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.000633] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.109475] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.110054] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 895.113147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.810s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.113388] env[68244]: DEBUG nova.objects.instance [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 8c00240d-5124-4ada-bd4d-4acd39a345c8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.374802] env[68244]: DEBUG nova.compute.manager [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 895.377197] env[68244]: DEBUG nova.compute.manager [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing instance network info cache due to event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 895.377197] env[68244]: DEBUG oslo_concurrency.lockutils [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.377197] env[68244]: DEBUG oslo_concurrency.lockutils [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.377197] env[68244]: DEBUG nova.network.neutron [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.485899] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.486056] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.486248] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 
d74a0d56-8656-429c-a703-fca87e07798f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.501501] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780496, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.616347] env[68244]: DEBUG nova.compute.utils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 895.618179] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 895.618179] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.663950] env[68244]: DEBUG nova.policy [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6af77f00c84d4e99bea878bc30dcc361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '821b99c053aa45b4b6b8fb09eb63aa73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 896.003773] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519876} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.004037] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.004287] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.004505] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9204cca9-4b00-4ff6-9064-40c81776f9e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.018693] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 896.018693] env[68244]: value = "task-2780497" [ 896.018693] env[68244]: _type = "Task" [ 896.018693] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.031128] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.057535] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.098350] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Successfully created port: aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.121110] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 896.132109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fe0b75-d4a4-45c3-a588-3288e47f8db5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.144681] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60cf2a7-f8fe-45ac-87fa-02007ade6b7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.184572] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fadf3be-2844-4385-b0d1-cd5405a7da8b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.195769] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855cdf3a-8f50-4751-bd40-6e9e40a1b71d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.213522] env[68244]: DEBUG nova.compute.provider_tree [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.356807] env[68244]: DEBUG nova.network.neutron [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updated VIF entry in instance network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.356807] env[68244]: DEBUG nova.network.neutron [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.373790] env[68244]: DEBUG nova.network.neutron [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.532654] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134962} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.532924] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.533730] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5346bb8f-1d8e-4d35-96e4-8d4de980c872 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.557366] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.557650] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2e63cc9-ffa9-44a4-a083-e09b0025283b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.577758] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 896.577758] env[68244]: value = "task-2780498" [ 896.577758] env[68244]: _type = "Task" [ 896.577758] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.586071] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780498, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.717328] env[68244]: DEBUG nova.scheduler.client.report [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.861986] env[68244]: DEBUG oslo_concurrency.lockutils [req-235bfd0e-a5cb-4c08-b35f-2b47a9f1792e req-59d050df-0c6d-4407-a6f4-dcee173eecda service nova] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.876332] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.876659] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Instance network_info: |[{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.877288] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:a5:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c9ac021e-cd9a-4092-8f49-fd149000b0aa', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.889230] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.889600] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.889839] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78ad6481-1849-4a04-b02c-f1cc803647b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.912865] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.912865] env[68244]: value = "task-2780499" [ 896.912865] env[68244]: _type = "Task" [ 896.912865] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.921373] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780499, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.969684] env[68244]: DEBUG nova.compute.manager [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Received event network-changed-c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 896.970165] env[68244]: DEBUG nova.compute.manager [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Refreshing instance network info cache due to event network-changed-c9ac021e-cd9a-4092-8f49-fd149000b0aa. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 896.970281] env[68244]: DEBUG oslo_concurrency.lockutils [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.970521] env[68244]: DEBUG oslo_concurrency.lockutils [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.970783] env[68244]: DEBUG nova.network.neutron [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Refreshing network info cache for port c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.090571] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.130664] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 897.155193] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.155478] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.155649] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.155831] env[68244]: DEBUG nova.virt.hardware [None 
req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.155978] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.156134] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.156360] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.156516] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.156701] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.156869] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.157050] env[68244]: DEBUG nova.virt.hardware [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.158055] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2ae0d1-fd72-4474-8512-0bb6c14a875c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.166538] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979f345d-1e4c-4bc5-921a-9f7f6fe1aa11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.222329] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.224950] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.538s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.225237] env[68244]: DEBUG nova.objects.instance [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lazy-loading 'resources' on Instance uuid aa7c6967-cd55-47fc-a2f5-db6e8d2e0307 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.246384] env[68244]: INFO nova.scheduler.client.report [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 8c00240d-5124-4ada-bd4d-4acd39a345c8 [ 897.424180] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780499, 'name': CreateVM_Task, 'duration_secs': 0.397562} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.424391] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.425104] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.425311] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.425628] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 897.425893] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9af15668-9d26-47ed-8c12-534896fca775 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.430780] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 897.430780] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5200ce39-dc12-2956-4801-91b72c6bd7a9" [ 897.430780] env[68244]: _type = "Task" [ 897.430780] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.439465] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5200ce39-dc12-2956-4801-91b72c6bd7a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.591379] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780498, 'name': ReconfigVM_Task, 'duration_secs': 0.780762} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.591695] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.592446] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a38e703-802b-48c3-944e-cbb88cabe06e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.598969] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 897.598969] env[68244]: value = "task-2780500" [ 897.598969] env[68244]: _type = "Task" [ 897.598969] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.608677] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780500, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.754178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14731637-600b-4ed8-abfd-09438d88edf1 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8c00240d-5124-4ada-bd4d-4acd39a345c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.239s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.945076] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5200ce39-dc12-2956-4801-91b72c6bd7a9, 'name': SearchDatastore_Task, 'duration_secs': 0.017406} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.945076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.945076] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.945076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.945076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.945076] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.945076] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f85dfe1-d4f6-49ee-9026-4fc3da261880 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.954532] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.954992] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.955939] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0eb09ef-dc18-477e-a4ce-7455f5c71115 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.961794] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 897.961794] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce87fc-dd68-bfa7-d892-290e471efa46" [ 897.961794] env[68244]: _type = "Task" [ 897.961794] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.970848] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce87fc-dd68-bfa7-d892-290e471efa46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.108802] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780500, 'name': Rename_Task, 'duration_secs': 0.237662} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.117954] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.120882] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81c6a092-06a8-4eeb-9900-2968c385496a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.129956] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 898.129956] env[68244]: value = "task-2780501" [ 898.129956] env[68244]: _type = "Task" [ 898.129956] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.141515] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.181234] env[68244]: DEBUG nova.network.neutron [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updated VIF entry in instance network info cache for port c9ac021e-cd9a-4092-8f49-fd149000b0aa. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.181234] env[68244]: DEBUG nova.network.neutron [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.222629] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Successfully updated port: aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.472425] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce87fc-dd68-bfa7-d892-290e471efa46, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.475658] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b957fc4-9035-4d01-b48f-0213b1557301 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.481541] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 898.481541] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e32a20-3fe0-fc2b-b394-9bda1a53225b" [ 898.481541] env[68244]: _type = "Task" [ 898.481541] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.490541] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e32a20-3fe0-fc2b-b394-9bda1a53225b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.529259] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e9d447-5396-4dee-b58e-a5574706062d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.536957] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a020cb1c-34e5-4a2d-9ca8-2eaa77cd353d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.567635] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c20e4b-4209-4abb-88b4-5d2e3ce5b091 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.575745] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5781b10f-a4f8-42bd-84f3-2bda90ccfe82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.589351] env[68244]: DEBUG nova.compute.provider_tree [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.641994] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780501, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.684447] env[68244]: DEBUG oslo_concurrency.lockutils [req-e7caea70-df0c-4c6e-bb37-6b024ebf743f req-036a6a20-a2f6-4428-86ea-07b3c3780173 service nova] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.733269] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.733269] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.733269] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.992294] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e32a20-3fe0-fc2b-b394-9bda1a53225b, 'name': SearchDatastore_Task, 'duration_secs': 0.012217} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.992573] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.992832] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.993107] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-598f5dd4-f0ce-4043-9415-aa17b5ca1ae9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.998497] env[68244]: DEBUG nova.compute.manager [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Received event network-vif-plugged-aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 898.998717] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Acquiring lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.998915] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.999090] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.999259] env[68244]: DEBUG nova.compute.manager [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] No waiting events found dispatching network-vif-plugged-aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 898.999423] env[68244]: WARNING nova.compute.manager [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Received unexpected event 
network-vif-plugged-aec282ae-c918-4d46-993a-8beba0b62926 for instance with vm_state building and task_state spawning. [ 898.999578] env[68244]: DEBUG nova.compute.manager [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Received event network-changed-aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 898.999727] env[68244]: DEBUG nova.compute.manager [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Refreshing instance network info cache due to event network-changed-aec282ae-c918-4d46-993a-8beba0b62926. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 898.999892] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Acquiring lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.005548] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 899.005548] env[68244]: value = "task-2780502" [ 899.005548] env[68244]: _type = "Task" [ 899.005548] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.013602] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780502, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.093048] env[68244]: DEBUG nova.scheduler.client.report [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 899.145196] env[68244]: DEBUG oslo_vmware.api [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780501, 'name': PowerOnVM_Task, 'duration_secs': 0.992428} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.145196] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.145525] env[68244]: INFO nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Took 9.46 seconds to spawn the instance on the hypervisor. [ 899.145702] env[68244]: DEBUG nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.146667] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b07e91a-e437-489d-ba70-c88613fa6059 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.281404] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.399464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.399808] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.457332] env[68244]: DEBUG nova.network.neutron [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Updating instance_info_cache with network_info: [{"id": "aec282ae-c918-4d46-993a-8beba0b62926", "address": "fa:16:3e:fd:d7:18", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaec282ae-c9", "ovs_interfaceid": "aec282ae-c918-4d46-993a-8beba0b62926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.516435] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780502, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.598757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.374s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.601218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.507s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.601499] env[68244]: DEBUG nova.objects.instance [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lazy-loading 'resources' on Instance uuid 2d9dbf75-992d-4932-bd5d-84462494ebe8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.621219] env[68244]: INFO nova.scheduler.client.report [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Deleted allocations for instance aa7c6967-cd55-47fc-a2f5-db6e8d2e0307 [ 899.665753] env[68244]: INFO nova.compute.manager [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Took 46.67 seconds to build instance. [ 899.775636] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 899.776590] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6b1dc7-c1fc-4814-8fdb-d585a1bb1b04 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.783155] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 899.783401] env[68244]: ERROR oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk due to incomplete transfer. [ 899.783542] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-37664fe9-21dd-42fb-a7cc-b4a1218e58b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.790713] env[68244]: DEBUG oslo_vmware.rw_handles [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52138a93-c00c-abaa-779b-49e6c0e35cf8/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 899.791055] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploaded image 965f2a77-66bb-4c10-984c-f5a53d6251b6 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 899.794277] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 899.794589] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8dc4d0f4-47c4-4766-af16-128ff713e705 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.801483] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 899.801483] env[68244]: value = "task-2780503" [ 899.801483] env[68244]: _type = "Task" [ 899.801483] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.811510] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780503, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.902276] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.964203] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.964517] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Instance network_info: |[{"id": "aec282ae-c918-4d46-993a-8beba0b62926", "address": "fa:16:3e:fd:d7:18", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaec282ae-c9", "ovs_interfaceid": "aec282ae-c918-4d46-993a-8beba0b62926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.964818] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Acquired lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.964998] env[68244]: DEBUG nova.network.neutron [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Refreshing network info cache for port aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.966187] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] 
[instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:d7:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aec282ae-c918-4d46-993a-8beba0b62926', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.973846] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.974582] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.974882] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f59cda0-7c70-4a8f-abe2-5c13c7181d20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.997176] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.997176] env[68244]: value = "task-2780504" [ 899.997176] env[68244]: _type = "Task" [ 899.997176] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.007203] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780504, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.014919] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780502, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524622} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.015208] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.015426] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.015666] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1c0885e-2157-4121-90eb-3e2b77f81163 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.024611] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 900.024611] env[68244]: value = "task-2780505" [ 900.024611] env[68244]: _type = "Task" [ 900.024611] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.032170] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.129819] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6a12a88b-e516-48ab-9413-c171e511f604 tempest-FloatingIPsAssociationTestJSON-32244975 tempest-FloatingIPsAssociationTestJSON-32244975-project-member] Lock "aa7c6967-cd55-47fc-a2f5-db6e8d2e0307" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.076s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.168181] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7100474c-a9cd-4e98-ab1c-c7f384b58274 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.572s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.313437] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780503, 'name': Destroy_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.477876] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.509929] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780504, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.533436] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124396} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.536168] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.537536] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b577f1-2793-4605-a06b-2ab055cef3a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.563039] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.568031] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-648eba6c-f71b-4f1a-91d9-be8e5e7d7656 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.589449] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 900.589449] env[68244]: value = "task-2780506" [ 900.589449] env[68244]: _type = "Task" [ 900.589449] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.600077] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780506, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.659129] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be784c9-a3eb-4ad9-8588-b4fa9a0798d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.667690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64055330-f011-43d3-a742-ee78f73aeff4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.713411] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3441c2df-3fdb-45f0-b41f-9c49b403bd09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.722797] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3b3fac-0784-4a53-b0f7-f4cae3c4342d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.737991] env[68244]: DEBUG nova.compute.provider_tree [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 900.812506] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780503, 'name': Destroy_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.821635] env[68244]: DEBUG nova.network.neutron [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Updated VIF entry in instance network info cache for port aec282ae-c918-4d46-993a-8beba0b62926. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.821990] env[68244]: DEBUG nova.network.neutron [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Updating instance_info_cache with network_info: [{"id": "aec282ae-c918-4d46-993a-8beba0b62926", "address": "fa:16:3e:fd:d7:18", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaec282ae-c9", "ovs_interfaceid": "aec282ae-c918-4d46-993a-8beba0b62926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.007580] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780504, 'name': CreateVM_Task, 'duration_secs': 0.756114} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.007701] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 901.008460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.008670] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.008932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 901.009206] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79c4809f-b1c1-4ae1-838e-398d70cd2c7c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.013774] env[68244]: DEBUG 
oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 901.013774] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52376e9f-bc18-83b8-8afd-6a559689e40a" [ 901.013774] env[68244]: _type = "Task" [ 901.013774] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.022141] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52376e9f-bc18-83b8-8afd-6a559689e40a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.100886] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.181748] env[68244]: DEBUG nova.compute.manager [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 901.258093] env[68244]: ERROR nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [req-041c23c4-ae74-44bd-bd7c-9cd289fb6fc0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-041c23c4-ae74-44bd-bd7c-9cd289fb6fc0"}]} [ 901.273500] env[68244]: DEBUG nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 901.291528] env[68244]: DEBUG nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 901.291761] env[68244]: DEBUG nova.compute.provider_tree [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 901.307276] env[68244]: DEBUG nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 901.314517] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780503, 'name': Destroy_Task, 'duration_secs': 1.095807} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.314772] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroyed the VM [ 901.315034] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 901.315273] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-566da67d-d763-4b98-b4c8-5bcd6bcf6006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.322635] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 901.322635] env[68244]: value = "task-2780507" [ 901.322635] env[68244]: _type = "Task" [ 901.322635] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.326642] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdee5724-a781-42c9-92b5-08060db594f4 req-9ec23c68-b465-4fa0-915a-f2ee357872fd service nova] Releasing lock "refresh_cache-c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.332236] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780507, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.341305] env[68244]: DEBUG nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 901.535821] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52376e9f-bc18-83b8-8afd-6a559689e40a, 'name': SearchDatastore_Task, 'duration_secs': 0.009878} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.536432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.536687] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.536934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.537143] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.537290] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.537569] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3917988f-2ff2-42ff-84a4-044a40d6bb9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.547124] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.547375] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.550731] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b4ee5b-fce3-4d65-9944-738a26a4e9d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.557111] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 901.557111] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5269265b-29ff-345c-2670-0e7344e7ecff" [ 901.557111] env[68244]: _type = "Task" [ 901.557111] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.564546] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5269265b-29ff-345c-2670-0e7344e7ecff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.603806] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.699629] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.839531] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780507, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.876717] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0b41e4-adb9-4565-83bf-e5d208e28ad8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.884586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2028f076-9e94-4d6e-9f0c-66013e2eb21a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.919464] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c55c3e-0c57-40b2-b611-40f3f617682b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.930338] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909e4ac8-d810-4a8e-9645-0233996849d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.948392] env[68244]: DEBUG nova.compute.provider_tree [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 902.066725] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5269265b-29ff-345c-2670-0e7344e7ecff, 'name': SearchDatastore_Task, 'duration_secs': 0.00972} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.067661] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9918a716-dd7e-417f-8d1e-a2d486f17849 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.072897] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 902.072897] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cfe4da-814d-f854-6639-dc7c2a5f1ce1" [ 902.072897] env[68244]: _type = "Task" [ 902.072897] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.081857] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cfe4da-814d-f854-6639-dc7c2a5f1ce1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.102044] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780506, 'name': ReconfigVM_Task, 'duration_secs': 1.351947} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.102298] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.102821] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86ba7d80-1ea4-40ff-b730-ecc76475251b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.110022] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 902.110022] env[68244]: value = "task-2780508" [ 902.110022] env[68244]: _type = "Task" [ 902.110022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.120268] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780508, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.333899] env[68244]: DEBUG oslo_vmware.api [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780507, 'name': RemoveSnapshot_Task, 'duration_secs': 0.84324} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.334198] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 902.334198] env[68244]: INFO nova.compute.manager [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 21.53 seconds to snapshot the instance on the hypervisor. 
[ 902.483672] env[68244]: DEBUG nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 902.483672] env[68244]: DEBUG nova.compute.provider_tree [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 86 to 87 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 902.483672] env[68244]: DEBUG nova.compute.provider_tree [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 902.583901] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cfe4da-814d-f854-6639-dc7c2a5f1ce1, 'name': SearchDatastore_Task, 'duration_secs': 0.009739} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.584076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.585036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c73d39d9-1fb7-4ce7-8d60-9243bd6f519f/c73d39d9-1fb7-4ce7-8d60-9243bd6f519f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.585036] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e45863b-3522-4a0e-b8d5-fe6a1a265ed5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.591761] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 902.591761] env[68244]: value = "task-2780509" [ 902.591761] env[68244]: _type = "Task" [ 902.591761] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.599266] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.622021] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780508, 'name': Rename_Task, 'duration_secs': 0.146922} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.622021] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.622021] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f104e8e6-d62f-40d6-abf7-1759ed0659dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.626363] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 902.626363] env[68244]: value = "task-2780510" [ 902.626363] env[68244]: _type = "Task" [ 902.626363] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.634561] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.885512] env[68244]: DEBUG nova.compute.manager [None req-303b49ff-2e38-4764-94fc-67950d4957ca tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Found 2 images (rotation: 2) {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 902.989761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.388s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.996627] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.604s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.997026] env[68244]: DEBUG nova.objects.instance [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lazy-loading 'resources' on Instance uuid 774ce6f8-6273-4f2b-b398-ee8c44d79520 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.026982] env[68244]: INFO nova.scheduler.client.report [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted allocations for instance 2d9dbf75-992d-4932-bd5d-84462494ebe8 [ 903.105162] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780509, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.138565] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780510, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.536365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19ae7f58-38d2-416f-9222-c7dad777c749 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "2d9dbf75-992d-4932-bd5d-84462494ebe8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.867s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.544719] env[68244]: DEBUG nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.545649] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995ef452-1a1f-4a6c-884f-6e512fd06eb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.605468] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576133} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.608005] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] c73d39d9-1fb7-4ce7-8d60-9243bd6f519f/c73d39d9-1fb7-4ce7-8d60-9243bd6f519f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.608233] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.608664] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5181fe2-f903-43b6-864a-f9af15408b3e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.615778] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 903.615778] env[68244]: value = "task-2780511" [ 903.615778] env[68244]: _type = "Task" [ 903.615778] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.625750] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780511, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.635658] env[68244]: DEBUG oslo_vmware.api [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780510, 'name': PowerOnVM_Task, 'duration_secs': 0.59954} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.638177] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.638379] env[68244]: INFO nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Took 9.04 seconds to spawn the instance on the hypervisor. [ 903.638550] env[68244]: DEBUG nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.639625] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d075e9-e6fd-4157-a295-789a6f780450 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.983912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee3e8ae-e016-4f7b-8c8a-777adbf823d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.991994] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6368c57e-8727-4e98-bcd9-e4e0515889c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.021571] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83cf918-2a98-4070-801e-16c27bc5a815 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.030254] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47087834-c064-4bd9-b00b-c2a9192feafe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.044555] env[68244]: DEBUG nova.compute.provider_tree [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.060754] env[68244]: INFO nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] instance snapshotting [ 904.061367] env[68244]: DEBUG nova.objects.instance [None 
req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.126258] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062774} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.126258] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 904.127948] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f28965-ba40-407f-9de4-28574f5c3a80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.154781] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] c73d39d9-1fb7-4ce7-8d60-9243bd6f519f/c73d39d9-1fb7-4ce7-8d60-9243bd6f519f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 904.160239] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4064cda-f5ca-41e3-adfa-6d4b1380a03f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.181256] env[68244]: INFO nova.compute.manager [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Took 48.42 seconds to build instance. [ 904.185464] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 904.185464] env[68244]: value = "task-2780512" [ 904.185464] env[68244]: _type = "Task" [ 904.185464] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.196056] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780512, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.548627] env[68244]: DEBUG nova.scheduler.client.report [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.566471] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369bb435-1649-4fa2-81e3-c047d21fa8c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.587343] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf173cbe-4062-4610-9e85-39674fb72108 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.686552] env[68244]: DEBUG oslo_concurrency.lockutils [None req-094a19ba-9c4b-49aa-a0d6-b8f786d7fa62 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.065s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.696183] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780512, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.853430] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.853663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.054793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.058395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.562s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.058683] env[68244]: DEBUG nova.objects.instance [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lazy-loading 'resources' on Instance uuid 6915d271-8346-41b5-a75b-2188fd3b57d1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.086335] env[68244]: INFO nova.scheduler.client.report [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleted allocations for instance 774ce6f8-6273-4f2b-b398-ee8c44d79520 [ 905.098699] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 905.099166] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7fa26e3e-0da0-4f13-924a-f0e8007fb367 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.114022] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 905.114022] env[68244]: value = "task-2780513" [ 905.114022] env[68244]: _type = "Task" [ 
905.114022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.123450] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780513, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.196375] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780512, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.358058] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.596043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-77048307-58b1-4e8c-a7cc-6e20e91949ff tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "774ce6f8-6273-4f2b-b398-ee8c44d79520" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.747s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.632716] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780513, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.699850] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780512, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.743577] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.743785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.882901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.971402] env[68244]: DEBUG nova.compute.manager [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 906.107562] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8eadde-a544-4ead-a46b-f8e14aab0c93 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.116715] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8171b667-b2da-4970-a92c-2b2813638bb2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.131177] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780513, 'name': CreateSnapshot_Task, 'duration_secs': 0.902045} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.156961] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 906.158410] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb56526-ee48-4558-8e06-ad4bf56e6ad6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.161569] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff2a145-5a53-4cca-aff0-231528cf7941 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.174413] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436e3aef-2edf-4042-b534-74501320a871 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.187638] env[68244]: DEBUG nova.compute.provider_tree [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.198216] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780512, 'name': ReconfigVM_Task, 'duration_secs': 1.917616} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.199038] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Reconfigured VM instance instance-0000003b to attach disk [datastore2] c73d39d9-1fb7-4ce7-8d60-9243bd6f519f/c73d39d9-1fb7-4ce7-8d60-9243bd6f519f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.199674] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46cc4cdf-c36b-434a-9da0-d5914be4a257 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.206066] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 906.206066] env[68244]: value = "task-2780514" [ 906.206066] env[68244]: _type = "Task" [ 906.206066] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.214385] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780514, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.246812] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 906.501157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.681896] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 906.682348] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb1675eb-5832-4ffb-a82b-7557aef1a0fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.691169] env[68244]: DEBUG nova.scheduler.client.report [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.695509] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 906.695509] env[68244]: value = "task-2780515" [ 906.695509] env[68244]: _type = "Task" [ 906.695509] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.704367] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780515, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.714819] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780514, 'name': Rename_Task, 'duration_secs': 0.385738} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.715088] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.715315] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb0e9a0d-aca2-4da0-b0df-043d7476af2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.720945] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 906.720945] env[68244]: value = "task-2780516" [ 906.720945] env[68244]: _type = "Task" [ 906.720945] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.728237] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780516, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.766443] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.029516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.029882] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.030147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.030375] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.030643] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.032819] env[68244]: INFO nova.compute.manager [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Terminating instance [ 907.197659] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.200300] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.095s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.201899] env[68244]: INFO nova.compute.claims [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.214847] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780515, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.220813] env[68244]: INFO nova.scheduler.client.report [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted allocations for instance 6915d271-8346-41b5-a75b-2188fd3b57d1 [ 907.235170] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780516, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.536574] env[68244]: DEBUG nova.compute.manager [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.536952] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.537936] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a81d79a-f4c5-4864-9de1-409a54ebcc47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.545889] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.546198] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afb9b4e4-f57a-4614-aa97-b2126cfb98ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.552014] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 907.552014] env[68244]: value = "task-2780517" [ 907.552014] env[68244]: _type = "Task" [ 907.552014] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.560719] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780517, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.708971] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780515, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.735468] env[68244]: DEBUG oslo_vmware.api [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780516, 'name': PowerOnVM_Task, 'duration_secs': 0.593682} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.735912] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3cde25e-1811-453f-b371-bd713bcffb94 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "6915d271-8346-41b5-a75b-2188fd3b57d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.803s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.736767] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.736966] env[68244]: INFO nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Took 10.61 seconds to spawn the instance on the hypervisor. [ 907.737158] env[68244]: DEBUG nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.738121] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8708f3ab-092d-4fd8-816c-c602408d461f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.062905] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780517, 'name': PowerOffVM_Task, 'duration_secs': 0.35647} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.063343] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.063343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.063666] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c33d7de3-0976-4b38-958c-81b15e3bc8df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.136045] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.136189] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.136296] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleting the datastore file [datastore2] 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.136642] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eece4885-3286-47dd-9f16-81130c7984e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.150327] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for the task: (returnval){ [ 908.150327] env[68244]: value = "task-2780519" [ 908.150327] env[68244]: _type = "Task" [ 908.150327] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.154086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "085b318d-e704-46f9-89a6-679b8aa49f85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.154520] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.154646] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.154895] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.155373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.157558] env[68244]: INFO nova.compute.manager [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Terminating instance [ 908.163441] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780519, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.209762] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780515, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.258944] env[68244]: INFO nova.compute.manager [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Took 49.29 seconds to build instance. [ 908.616659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a879ecc-5be5-4a42-a5a7-8ba6c5748aee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.624898] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbc7d27-7f74-43fa-a9e7-fb021940a452 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.658291] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa17e03-1a98-4b61-a94f-d1d3070827f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.666221] env[68244]: DEBUG nova.compute.manager [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 908.666443] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.666733] env[68244]: DEBUG oslo_vmware.api [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Task: {'id': task-2780519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159694} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.669367] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93aebf52-20ea-41d5-b564-5dad0108e32d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.672036] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.672322] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.672513] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.672685] env[68244]: INFO nova.compute.manager [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 908.672916] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
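
Annotation: the terminate path for instance 8f0e60c8 runs a fixed teardown order: power off the VM, unregister it, delete its datastore directory, then hand network cleanup to a looping call that the log shows waiting on _deallocate_network_with_retries. A simplified sketch of that shape with duck-typed stand-ins; this is not Nova's code, only the ordering and the retry idea.

```python
import time


def deallocate_network_with_retries(deallocate, attempts=3, delay=1.0):
    """Call ``deallocate()`` until it succeeds or the retry budget runs out.

    Simplified stand-in for the looping-call wrapper the terminate path is
    shown waiting on; ``deallocate`` is any zero-argument callable that talks
    to Neutron.
    """
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception as exc:  # deliberately broad: sketch only
            if attempt == attempts:
                raise
            print(f"deallocate_for_instance failed ({exc}); "
                  f"retrying ({attempt}/{attempts})")
            time.sleep(delay)


def destroy_guest(vm, deallocate_networks):
    """Teardown ordering as logged for 8f0e60c8; ``vm`` is duck-typed."""
    vm.power_off()                  # PowerOffVM_Task
    vm.unregister()                 # UnregisterVM
    vm.delete_datastore_contents()  # FileManager.DeleteDatastoreFile_Task
    deallocate_network_with_retries(deallocate_networks)
```
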
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.673185] env[68244]: DEBUG nova.compute.manager [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.673282] env[68244]: DEBUG nova.network.neutron [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.675830] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230116f3-32be-413f-b92d-ca15cf22e6f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.684116] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.695019] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52a4a945-45f0-4785-98e8-d241efcb4006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.695019] env[68244]: DEBUG nova.compute.provider_tree [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.699356] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 908.699356] env[68244]: value = "task-2780520" [ 908.699356] env[68244]: _type = "Task" [ 908.699356] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.710882] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780520, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.713920] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780515, 'name': CloneVM_Task, 'duration_secs': 1.531536} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.714129] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Created linked-clone VM from snapshot [ 908.714855] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58b7706-66bc-4464-a5df-f2201421f434 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.723216] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploading image 9931e952-d174-44a7-b3e9-86c757b7af30 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 908.751866] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 908.751866] env[68244]: value = "vm-559036" [ 908.751866] env[68244]: _type = "VirtualMachine" [ 908.751866] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 908.751866] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cffe7ff0-7a39-4415-8e12-8461722aade0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.759114] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease: (returnval){ [ 908.759114] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5249b5d5-ac6e-9135-0a72-69829d72eb34" [ 908.759114] env[68244]: _type = "HttpNfcLease" [ 908.759114] env[68244]: } obtained for exporting VM: (result){ [ 908.759114] env[68244]: value = "vm-559036" [ 908.759114] env[68244]: _type = "VirtualMachine" [ 908.759114] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 908.759385] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the lease: (returnval){ [ 908.759385] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5249b5d5-ac6e-9135-0a72-69829d72eb34" [ 908.759385] env[68244]: _type = "HttpNfcLease" [ 908.759385] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 908.763196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9321c3b7-ed2b-4b37-bbf0-ae2b7dec5589 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.519s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.766721] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.766721] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5249b5d5-ac6e-9135-0a72-69829d72eb34" [ 908.766721] env[68244]: _type = "HttpNfcLease" [ 908.766721] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 908.968263] env[68244]: DEBUG nova.compute.manager [req-93522308-7921-43cf-b541-80780f3b7e4f req-de92ad52-34b2-4775-b6b4-0bf4b35eebb1 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Received event network-vif-deleted-817af294-3a0f-4ead-9a86-ed1f635dc303 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 908.968466] env[68244]: INFO nova.compute.manager [req-93522308-7921-43cf-b541-80780f3b7e4f req-de92ad52-34b2-4775-b6b4-0bf4b35eebb1 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Neutron deleted interface 817af294-3a0f-4ead-9a86-ed1f635dc303; detaching it from the instance and deleting it from the info cache [ 908.968642] env[68244]: DEBUG nova.network.neutron [req-93522308-7921-43cf-b541-80780f3b7e4f req-de92ad52-34b2-4775-b6b4-0bf4b35eebb1 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.196591] env[68244]: DEBUG nova.scheduler.client.report [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.209676] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780520, 'name': PowerOffVM_Task, 'duration_secs': 0.262623} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.209951] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.210142] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.210397] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-569eb3a7-c15f-488a-a9ab-0e97eadd9383 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.268674] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 909.268674] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5249b5d5-ac6e-9135-0a72-69829d72eb34" [ 909.268674] env[68244]: _type = "HttpNfcLease" [ 909.268674] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 909.269313] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 909.269313] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5249b5d5-ac6e-9135-0a72-69829d72eb34" [ 909.269313] env[68244]: _type = "HttpNfcLease" [ 909.269313] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 909.270439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195fe66b-269b-4af3-bb74-19c23c977276 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.279434] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 909.279635] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk for reading. 
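
Annotation: the image upload for e8655168 uses the NFC export flow: ExportVm returns an HttpNfcLease, the driver waits for it to leave "initializing", reads the disk-0.vmdk URL from the lease info, streams the disk, and keeps the lease alive with HttpNfcLeaseProgress before completing it. A schematic version follows, with hypothetical lease/reader/sink objects standing in for the vSphere and oslo.vmware calls named above.

```python
import time


def export_first_disk(vm, open_url, sink, chunk_size=1 << 20, poll=0.5):
    """Stream a VM's first disk through an NFC export lease.

    ``vm``, ``open_url`` and ``sink`` are duck-typed stand-ins for the
    vSphere lease object, the HTTPS read handle and the image-service
    upload side; only the ordering mirrors the log.
    """
    lease = vm.create_export_lease()        # VirtualMachine.ExportVm
    while lease.state == "initializing":    # "Lease ... is initializing."
        time.sleep(poll)
    if lease.state != "ready":
        raise RuntimeError("export lease did not become ready")

    url = lease.disk_urls[0]                # e.g. .../disk-0.vmdk from lease info
    copied = 0
    with open_url(url) as src:              # read-only NFC connection
        while chunk := src.read(chunk_size):
            sink.write(chunk)
            copied += len(chunk)
            lease.update_progress(copied)   # HttpNfcLeaseProgress keep-alive
    lease.complete()                        # release the lease when done
    return copied
```
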
{{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 909.339023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.339023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.339023] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleting the datastore file [datastore2] 085b318d-e704-46f9-89a6-679b8aa49f85 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.339023] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f68003bc-b5ff-475a-b6da-b7f3a56e8e8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.344972] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 909.344972] env[68244]: value = "task-2780523" [ 909.344972] env[68244]: _type = "Task" [ 909.344972] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.354863] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780523, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.371211] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f878c2e8-fb1f-4f1c-8045-f175991b5815 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.453485] env[68244]: DEBUG nova.network.neutron [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.471334] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96df22f8-9633-4f8d-a642-97002e9b9fe0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.481377] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25ad458-7d01-4482-8112-d8d6b6175463 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.515926] env[68244]: DEBUG nova.compute.manager [req-93522308-7921-43cf-b541-80780f3b7e4f req-de92ad52-34b2-4775-b6b4-0bf4b35eebb1 service nova] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Detach interface failed, port_id=817af294-3a0f-4ead-9a86-ed1f635dc303, reason: Instance 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 909.705673] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.706491] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 909.709742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.385s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.710561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.710642] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 909.711056] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.316s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.713429] env[68244]: INFO nova.compute.claims [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.717510] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd04f64-4462-4ee5-baa6-71c611fbd4b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.729659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976d29d2-642a-487d-899a-75ba2fa789f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.747310] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaa16b5-eab6-40a0-8d35-a2845acb2acb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.755546] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639dbd9b-62a8-4a68-aeba-adcf7beaa260 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.791767] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178610MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.792025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.858595] env[68244]: DEBUG oslo_vmware.api [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135817} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.858595] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.858595] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.858595] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.858595] env[68244]: INFO nova.compute.manager [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Took 1.19 seconds to destroy the instance on the hypervisor. [ 909.858595] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 909.858595] env[68244]: DEBUG nova.compute.manager [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 909.858595] env[68244]: DEBUG nova.network.neutron [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 909.925827] env[68244]: DEBUG nova.compute.manager [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.926943] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8ea2ee-ede6-4ece-af4a-2c86f2652fa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.956378] env[68244]: INFO nova.compute.manager [-] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Took 1.28 seconds to deallocate network for instance. [ 910.221087] env[68244]: DEBUG nova.compute.utils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 910.222762] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Allocating IP information in the background. 
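
Annotation: "Allocating IP information in the background" marks the point where the build flow forks: Neutron allocation is started asynchronously so block-device mappings can be prepared in parallel, and the spawn later blocks on the network result. A small sketch of that overlap using a thread-pool future; the real service uses eventlet green threads, and the three callables here are stand-ins.

```python
from concurrent.futures import ThreadPoolExecutor


def build_and_run(allocate_networks, build_block_devices, spawn):
    """Overlap Neutron allocation with block-device preparation.

    The callables are stand-ins for the steps named in the log
    ("Allocating IP information in the background", "Start building block
    device mappings", "Start spawning the instance"); only the ordering and
    the point where the build blocks on network_info are shown here.
    """
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_networks)   # runs in background
        block_devices = build_block_devices()             # proceeds immediately
        network_info = network_future.result()            # spawn needs the ports
        return spawn(network_info, block_devices)
```
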
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 910.222958] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.277540] env[68244]: DEBUG nova.policy [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15da44373823445e81c47029a56aa6f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b9e47e53c1f48e593e8d7161e9e3386', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 910.439919] env[68244]: INFO nova.compute.manager [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] instance snapshotting [ 910.442865] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6c848d-6c0f-407f-83f6-b889b5fd6e08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.463147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.464405] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf60e8f-3c12-4ac9-a6a2-61efe370cb6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.590835] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Successfully created port: a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.622420] env[68244]: DEBUG nova.network.neutron [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.727179] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.979522] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 910.979955] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6b2d1c22-66b5-4284-8647-b8607670821d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.990022] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 910.990022] env[68244]: value = "task-2780524" [ 910.990022] env[68244]: _type = "Task" [ 910.990022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.996654] env[68244]: DEBUG nova.compute.manager [req-03ac1db5-abf3-4742-8201-03ebd4c337aa req-21efd12f-3979-42fc-880a-95678ab9797a service nova] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Received event network-vif-deleted-87739e1f-7578-4f51-abbc-678119d483ee {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 911.004298] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780524, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.125583] env[68244]: INFO nova.compute.manager [-] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Took 1.27 seconds to deallocate network for instance. 
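
Annotation: the "compute_resources" lock traffic above (Acquiring/acquired/released, with waited and held times) serializes the resource tracker: instance claims, usage updates and the periodic audit all share one host-wide lock, and inventory is only pushed to Placement when the freshly built dict differs from what was last reported, hence the repeated "Inventory has not changed for provider ... based on inventory data" lines. A condensed sketch of that compare-before-update step; the lock object, cache and report_to_placement callable are assumptions for illustration, not Nova's internals.

```python
import threading

_COMPUTE_RESOURCES_LOCK = threading.Lock()  # stand-in for the named oslo lock
_last_reported = {}                         # provider uuid -> inventory dict


def update_provider_inventory(provider_uuid, inventory, report_to_placement):
    """Push inventory to Placement only when it actually changed.

    ``inventory`` follows the shape printed in the log, e.g.
    {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, 'MEMORY_MB': {...}};
    ``report_to_placement`` is a hypothetical callable doing the HTTP update.
    """
    with _COMPUTE_RESOURCES_LOCK:
        if _last_reported.get(provider_uuid) == inventory:
            print(f"Inventory has not changed for provider {provider_uuid}")
            return False
        report_to_placement(provider_uuid, inventory)
        _last_reported[provider_uuid] = dict(inventory)
        return True
```
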
[ 911.237945] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7c4a9-1237-4b58-90f1-82b8662b8ceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.245369] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea56e92-3f19-4016-b989-5aff254c1750 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.277417] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf749a4-6ca6-4c8e-83fd-8ed81660522a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.285011] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92936610-df0e-4af3-a77a-5a473e774083 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.298926] env[68244]: DEBUG nova.compute.provider_tree [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.500977] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780524, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.632961] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.741968] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 911.769278] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.770534] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.771104] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.771104] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.771104] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.771365] env[68244]: DEBUG nova.virt.hardware [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.772094] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c5c3ea-9716-4a02-bcce-fe874bf00d82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.782519] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64265aa-a54f-4c6b-b52b-0791712aef7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.801941] env[68244]: DEBUG nova.scheduler.client.report [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.001711] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780524, 'name': CreateSnapshot_Task, 'duration_secs': 0.86674} completed successfully. 
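
Annotation: the nova.virt.hardware lines above show CPU topology selection for the 1-vCPU m1.nano flavor: flavor and image limits default to 65536 sockets/cores/threads, the factorizations of the vCPU count are enumerated, and the single candidate wins ("Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)"). A compact illustration of that enumeration step, not Nova's exact algorithm.

```python
def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found


# The m1.nano flavor in the log has a single vCPU, so exactly one topology fits.
assert possible_cpu_topologies(1) == [(1, 1, 1)]
```
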
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.001997] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 912.003075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cad164-1a05-4aba-9d7b-84bba242819b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.086374] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Successfully updated port: a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.307487] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.308196] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 912.310896] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.284s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.311209] env[68244]: DEBUG nova.objects.instance [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lazy-loading 'resources' on Instance uuid 184f7694-9cab-4184-a1c0-926763a81baf {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.521648] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 912.521934] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fc8ce5c7-def3-4b4c-a725-bcb7d3f72b5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.531210] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 912.531210] env[68244]: value = "task-2780525" [ 912.531210] env[68244]: _type = "Task" [ 912.531210] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.539073] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780525, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.591174] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.591245] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.591355] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.814906] env[68244]: DEBUG nova.compute.utils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.819504] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 912.819698] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.873870] env[68244]: DEBUG nova.policy [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0c13e099528435296ac3827d8f52e31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2339433c10b4813937eb9968a84324a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 913.021149] env[68244]: DEBUG nova.compute.manager [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-vif-plugged-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 913.021390] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Acquiring lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.021605] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.021775] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.021940] env[68244]: DEBUG nova.compute.manager [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] No waiting events found dispatching network-vif-plugged-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 913.022483] env[68244]: WARNING nova.compute.manager [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received unexpected event network-vif-plugged-a9cddce0-c422-4f46-a41f-feecfe3a6b8e for instance with vm_state building and task_state spawning. 
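
The WARNING just above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") reflects the compute manager's external-event plumbing: the spawning path registers a waiter for an event such as network-vif-plugged-<port-id>, the Neutron notification pops that waiter and releases it, and an event that nobody registered for is merely logged as unexpected. Below is a minimal, illustrative sketch of that pattern in plain Python, included only to clarify the log; the names EventRegistry, expect and dispatch are invented for the example and are not Nova's actual API.

import threading
from collections import defaultdict


class EventRegistry:
    """Toy stand-in for the compute manager's per-instance event waiters."""

    def __init__(self):
        self._lock = threading.Lock()
        # (instance_uuid, event_name) -> threading.Event
        self._waiters = defaultdict(threading.Event)

    def expect(self, instance_uuid, event_name):
        """Register interest in an event and return an object to wait on."""
        with self._lock:
            return self._waiters[(instance_uuid, event_name)]

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event; returns False if nobody was waiting."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"Received unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    registry = EventRegistry()
    instance = "92ce8150-982b-4669-b27a-4afd5c85da86"
    event = "network-vif-plugged-a9cddce0-c422-4f46-a41f-feecfe3a6b8e"

    # The spawning path registers before plugging the VIF, then waits with a timeout.
    waiter = registry.expect(instance, event)
    threading.Timer(0.1, registry.dispatch, args=(instance, event)).start()
    print("plugged in time:", waiter.wait(timeout=5.0))

    # An event arriving with no registered waiter is reported as unexpected,
    # which is what the WARNING in the log above corresponds to.
    registry.dispatch(instance, "network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e")

Popping the waiter under the lock keeps dispatch one-shot: a delivery with no registered waiter (for example an event that races ahead of the registration, as during spawn here) finds nothing to release and is only reported, which is why the build simply continues after the warning.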
[ 913.022538] env[68244]: DEBUG nova.compute.manager [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 913.022953] env[68244]: DEBUG nova.compute.manager [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing instance network info cache due to event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 913.022953] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.045299] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780525, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.133556] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.252032] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Successfully created port: 7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.320680] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 913.339502] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62badc67-92f2-4f2f-aa60-96d2544639a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.347931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080b304d-6657-4de6-81a7-784136829114 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.382307] env[68244]: DEBUG nova.network.neutron [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.384631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b5f117-0e41-4d87-8f72-4e73dea36ced {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.394270] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701b8e1e-b78d-4af1-b3de-7f8c5f0a177f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.411595] env[68244]: DEBUG nova.compute.provider_tree [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.542389] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780525, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.889207] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.889554] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Instance network_info: |[{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.890650] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.890895] env[68244]: DEBUG nova.network.neutron [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.892063] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:ca:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30c39e9a-a798-4f25-a48c-91f786ba332c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9cddce0-c422-4f46-a41f-feecfe3a6b8e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.900065] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a 
tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Creating folder: Project (1b9e47e53c1f48e593e8d7161e9e3386). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.901198] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecefbaee-c38e-4a3a-b787-09f886f44876 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.913519] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Created folder: Project (1b9e47e53c1f48e593e8d7161e9e3386) in parent group-v558876. [ 913.913719] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Creating folder: Instances. Parent ref: group-v559039. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 913.913953] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80eabbba-4e0b-442f-bca3-643be339c0d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.916845] env[68244]: DEBUG nova.scheduler.client.report [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.923851] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Created folder: Instances in parent group-v559039. [ 913.924096] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.924282] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.924485] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc578ea0-1f30-4a90-988c-b3608358c56f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.946402] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.946402] env[68244]: value = "task-2780528" [ 913.946402] env[68244]: _type = "Task" [ 913.946402] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.954381] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780528, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.042723] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780525, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.332805] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 914.357549] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 914.357910] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.358094] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 914.358282] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.358431] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 914.358571] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 914.358772] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 914.358930] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 914.359108] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 914.359271] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 914.359443] env[68244]: DEBUG nova.virt.hardware [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 914.360344] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc7dda0-ba62-4bd9-84ed-9fa22967da4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.368287] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d50ea3-3b15-4029-815e-e570ea2ac67e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.421449] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 
tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.423972] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.381s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.424251] env[68244]: DEBUG nova.objects.instance [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lazy-loading 'resources' on Instance uuid 874d6895-0f3d-4a99-b27a-cad627ddeecd {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.447674] env[68244]: INFO nova.scheduler.client.report [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted allocations for instance 184f7694-9cab-4184-a1c0-926763a81baf [ 914.461492] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780528, 'name': CreateVM_Task, 'duration_secs': 0.454214} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.461696] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.462485] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.464683] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.464683] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.464683] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da02bf2f-02aa-4ff6-911a-6668cdc1e4f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.468639] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 
tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 914.468639] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d65e8-47ef-15c5-343e-ffa3b6d8e91c" [ 914.468639] env[68244]: _type = "Task" [ 914.468639] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.479544] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d65e8-47ef-15c5-343e-ffa3b6d8e91c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.543908] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780525, 'name': CloneVM_Task, 'duration_secs': 1.729055} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.544323] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Created linked-clone VM from snapshot [ 914.545199] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de51c4ad-4424-469a-99d0-cdaa18b429ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.553520] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Uploading image 65d86da3-59fb-4ec7-873b-2525143225e1 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 914.578244] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 914.578244] env[68244]: value = "vm-559038" [ 914.578244] env[68244]: _type = "VirtualMachine" [ 914.578244] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 914.578568] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ea528026-9053-4c05-9c85-0f58c4a1c35e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.586516] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease: (returnval){ [ 914.586516] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528d500b-75a5-956c-27d7-c2b31523d55a" [ 914.586516] env[68244]: _type = "HttpNfcLease" [ 914.586516] env[68244]: } obtained for exporting VM: (result){ [ 914.586516] env[68244]: value = "vm-559038" [ 914.586516] env[68244]: _type = "VirtualMachine" [ 914.586516] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 914.586911] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the lease: (returnval){ [ 914.586911] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528d500b-75a5-956c-27d7-c2b31523d55a" [ 914.586911] env[68244]: _type = "HttpNfcLease" [ 914.586911] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 914.594109] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 914.594109] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528d500b-75a5-956c-27d7-c2b31523d55a" [ 914.594109] env[68244]: _type = "HttpNfcLease" [ 914.594109] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 914.690731] env[68244]: DEBUG nova.network.neutron [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updated VIF entry in instance network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.691258] env[68244]: DEBUG nova.network.neutron [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.841462] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Successfully updated port: 7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.957678] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3212b6c-5b23-4ae7-90c4-9c3ab7ce78a7 tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock 
"184f7694-9cab-4184-a1c0-926763a81baf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.728s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.982766] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d65e8-47ef-15c5-343e-ffa3b6d8e91c, 'name': SearchDatastore_Task, 'duration_secs': 0.012627} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.983258] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.983328] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.983695] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.983695] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.983872] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.984675] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96d11cb8-c7bc-4d63-a07e-43ee2b043d39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.997557] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.997771] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.998592] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d71c0cc8-d474-4f9f-90ac-3e35b467b111 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.007275] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 915.007275] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f3d3-95bb-5037-1be7-ab90d0f5be8e" [ 915.007275] env[68244]: _type = "Task" [ 915.007275] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.015099] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f3d3-95bb-5037-1be7-ab90d0f5be8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.051948] env[68244]: DEBUG nova.compute.manager [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Received event network-vif-plugged-7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.052291] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Acquiring lock "91232cad-54b3-45af-bb54-af268de182fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.052495] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Lock "91232cad-54b3-45af-bb54-af268de182fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.052662] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Lock "91232cad-54b3-45af-bb54-af268de182fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.052828] env[68244]: DEBUG nova.compute.manager [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] No waiting events found dispatching network-vif-plugged-7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.052996] env[68244]: WARNING 
nova.compute.manager [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Received unexpected event network-vif-plugged-7c817571-afec-4248-8b5f-5a008e7f6141 for instance with vm_state building and task_state spawning. [ 915.053175] env[68244]: DEBUG nova.compute.manager [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Received event network-changed-7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.053329] env[68244]: DEBUG nova.compute.manager [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Refreshing instance network info cache due to event network-changed-7c817571-afec-4248-8b5f-5a008e7f6141. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 915.053509] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Acquiring lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.053643] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Acquired lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.053794] env[68244]: DEBUG nova.network.neutron [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Refreshing network info cache for port 7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.098378] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 915.098378] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528d500b-75a5-956c-27d7-c2b31523d55a" [ 915.098378] env[68244]: _type = "HttpNfcLease" [ 915.098378] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 915.098966] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 915.098966] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528d500b-75a5-956c-27d7-c2b31523d55a" [ 915.098966] env[68244]: _type = "HttpNfcLease" [ 915.098966] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 915.099907] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8946297c-e1cb-4d9e-b9a9-bba221e29f49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.110182] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 915.110182] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 915.194595] env[68244]: DEBUG oslo_concurrency.lockutils [req-590a0b3c-19ad-4fb2-b667-0da7e9e574c6 req-ebd72b86-c6f7-46cf-b508-b7b77a2a9814 service nova] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.224711] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d1afb061-3f84-4346-bb52-0c90f9df37b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.343734] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.470184] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcaab22-d1d8-4c51-b972-4e20e31de4be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.478147] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4634e732-5519-4828-81ba-76ec73812b43 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.516361] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3258e47c-2a9d-4c3e-a5e9-ac82bb271d20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.526954] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5281f3d3-95bb-5037-1be7-ab90d0f5be8e, 'name': SearchDatastore_Task, 'duration_secs': 0.01071} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.530624] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bccaeac-1076-443d-8798-467480b3fdfe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.533561] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cbdfe7-47d1-4a93-8c6b-7e0bc07de1f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.543879] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 915.543879] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255ebfb-2f30-bed2-1f10-8536795f4893" [ 915.543879] env[68244]: _type = "Task" [ 915.543879] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.554196] env[68244]: DEBUG nova.compute.provider_tree [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.567453] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255ebfb-2f30-bed2-1f10-8536795f4893, 'name': SearchDatastore_Task, 'duration_secs': 0.012097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.568470] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.568736] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 92ce8150-982b-4669-b27a-4afd5c85da86/92ce8150-982b-4669-b27a-4afd5c85da86.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.569586] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3a0ab86-f8b3-4679-b5b8-a590f8a8f5e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.577402] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 915.577402] env[68244]: value = "task-2780530" [ 915.577402] env[68244]: _type = "Task" [ 915.577402] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.586463] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.604242] env[68244]: DEBUG nova.network.neutron [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.691668] env[68244]: DEBUG nova.network.neutron [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.083115] env[68244]: ERROR nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] [req-663f739e-4142-4f25-9c63-2683334bf4dc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-663f739e-4142-4f25-9c63-2683334bf4dc"}]} [ 916.091854] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780530, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.101909] env[68244]: DEBUG nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 916.119962] env[68244]: DEBUG nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 916.120357] env[68244]: DEBUG nova.compute.provider_tree [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.132869] env[68244]: DEBUG nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 916.158323] env[68244]: DEBUG nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 916.198092] env[68244]: DEBUG oslo_concurrency.lockutils [req-3004d1f9-bb74-4362-b8e9-9953ac49a0d9 req-9275bb99-ef75-47c2-b6d5-527b6aa86c4d service nova] Releasing lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.198822] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.199407] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.590907] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520987} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.591119] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 92ce8150-982b-4669-b27a-4afd5c85da86/92ce8150-982b-4669-b27a-4afd5c85da86.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.591329] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.591592] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ec78695-a2f5-4f02-81d7-242fa949ecbd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.598323] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 916.598323] env[68244]: value = "task-2780531" [ 916.598323] env[68244]: _type = "Task" [ 916.598323] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.607159] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780531, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.622561] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5fc685-f801-4578-8c25-c5957a32a1ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.632302] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8625bd-ae2b-4bbd-b3e5-3a23d0213c9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.665189] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fe492d-758c-4d8f-bbe2-32e967190a8b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.673390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf2bb77-ad3c-47de-ac6f-efedb35fd22a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.687849] env[68244]: DEBUG nova.compute.provider_tree [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.733387] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.870270] env[68244]: DEBUG nova.network.neutron [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Updating instance_info_cache with network_info: [{"id": "7c817571-afec-4248-8b5f-5a008e7f6141", "address": "fa:16:3e:f3:e5:94", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c817571-af", "ovs_interfaceid": "7c817571-afec-4248-8b5f-5a008e7f6141", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.998301] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 916.999285] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65ae896-2c63-44cf-9b00-6832c8d812c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.005476] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 917.005644] env[68244]: ERROR oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk due to incomplete transfer. 
[ 917.005878] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3ebc6b63-d099-4eb5-8086-401fc1b69ddc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.014025] env[68244]: DEBUG oslo_vmware.rw_handles [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233fb45-0574-731f-d895-abb2de35abee/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 917.014231] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Uploaded image 9931e952-d174-44a7-b3e9-86c757b7af30 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 917.017381] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 917.018093] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-14a72c8a-7f75-4f23-b167-1b7852f13aa5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.024614] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 917.024614] env[68244]: value = "task-2780532" [ 917.024614] env[68244]: _type = "Task" [ 917.024614] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.032780] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780532, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.108895] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109544} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.109298] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.109979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add8bde5-7d95-49f6-8b55-bee0f1375a17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.133176] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 92ce8150-982b-4669-b27a-4afd5c85da86/92ce8150-982b-4669-b27a-4afd5c85da86.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.133485] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbcc41b8-e79b-4ebb-a224-3b52d87d22c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.155109] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 917.155109] env[68244]: value = "task-2780533" [ 917.155109] env[68244]: _type = "Task" [ 917.155109] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.164182] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780533, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.191478] env[68244]: DEBUG nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.373576] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "refresh_cache-91232cad-54b3-45af-bb54-af268de182fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.373923] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance network_info: |[{"id": "7c817571-afec-4248-8b5f-5a008e7f6141", "address": "fa:16:3e:f3:e5:94", "network": {"id": "75a3fcbf-828f-44ef-a705-497aab0cf9ab", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-145153828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2339433c10b4813937eb9968a84324a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c817571-af", "ovs_interfaceid": "7c817571-afec-4248-8b5f-5a008e7f6141", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 917.374379] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:e5:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c817571-afec-4248-8b5f-5a008e7f6141', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.382267] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 917.382516] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.382764] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dec71ebd-1ff2-4eb0-88db-244f1cbf07b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.404059] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.404059] env[68244]: value = "task-2780534" [ 917.404059] env[68244]: _type = "Task" [ 917.404059] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.412045] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780534, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.535765] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780532, 'name': Destroy_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.673588] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780533, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.696531] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.272s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.699133] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.577s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.699408] env[68244]: DEBUG nova.objects.instance [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lazy-loading 'resources' on Instance uuid b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.723265] env[68244]: INFO nova.scheduler.client.report [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Deleted allocations for instance 874d6895-0f3d-4a99-b27a-cad627ddeecd [ 917.913429] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780534, 'name': CreateVM_Task, 'duration_secs': 0.374879} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.913609] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.914335] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.914498] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.914821] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.915117] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f183ff-5c9e-4663-9af4-7649f273bebf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 917.919821] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 917.919821] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522c82d1-da44-48a1-5f96-75c8616686c3" [ 917.919821] env[68244]: _type = "Task" [ 917.919821] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.927413] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522c82d1-da44-48a1-5f96-75c8616686c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.035211] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780532, 'name': Destroy_Task, 'duration_secs': 0.562097} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.035496] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroyed the VM [ 918.035745] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 918.036052] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e5c4bd67-5a90-40f3-ad4a-064f5925fc71 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.043125] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 918.043125] env[68244]: value = "task-2780535" [ 918.043125] env[68244]: _type = "Task" [ 918.043125] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.050844] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780535, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.169051] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780533, 'name': ReconfigVM_Task, 'duration_secs': 0.537517} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.169392] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 92ce8150-982b-4669-b27a-4afd5c85da86/92ce8150-982b-4669-b27a-4afd5c85da86.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.169958] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65625ab7-0242-49e1-9177-cd2107a7f6d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.175969] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 918.175969] env[68244]: value = "task-2780536" [ 918.175969] env[68244]: _type = "Task" [ 918.175969] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.183603] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780536, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.232887] env[68244]: DEBUG oslo_concurrency.lockutils [None req-763f461f-d076-4e11-b13e-a893d0699c6c tempest-MultipleCreateTestJSON-2127338032 tempest-MultipleCreateTestJSON-2127338032-project-member] Lock "874d6895-0f3d-4a99-b27a-cad627ddeecd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.847s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.430010] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522c82d1-da44-48a1-5f96-75c8616686c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011277} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.432734] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.432975] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.433225] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.433372] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.433546] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.434012] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b80e1dfa-03a6-43c0-ad64-5863de330bd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.443121] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.443181] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.446452] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0f758e0-32d0-47a8-84f1-7ef2f56c71b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.451716] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 918.451716] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cd9db-4a6b-5f9d-0318-052db1c2a698" [ 918.451716] env[68244]: _type = "Task" [ 918.451716] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.459612] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cd9db-4a6b-5f9d-0318-052db1c2a698, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.556889] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780535, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.637307] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc2db9c-2b6c-4180-874e-45bbcaf2ba2c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.644716] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af42654-fdf0-4849-96b3-698fdf67e7a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.677230] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f93e32-87c8-48f2-bf88-912771f26ea4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.689186] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd08c11f-1267-49cb-bd8f-ed021424ff9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.693195] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780536, 'name': Rename_Task, 'duration_secs': 0.178835} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.693575] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.694216] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6795c34-7122-4703-83ec-649bf19aa5e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.703632] env[68244]: DEBUG nova.compute.provider_tree [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.709365] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 918.709365] env[68244]: value = "task-2780537" [ 918.709365] env[68244]: _type = "Task" [ 918.709365] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.717246] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780537, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.962788] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cd9db-4a6b-5f9d-0318-052db1c2a698, 'name': SearchDatastore_Task, 'duration_secs': 0.009305} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.963611] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27aa863f-0010-4dcf-a7f8-291ffe802c0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.968936] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 918.968936] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d2d4a-a605-524d-48e1-cd4baa610403" [ 918.968936] env[68244]: _type = "Task" [ 918.968936] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.977050] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d2d4a-a605-524d-48e1-cd4baa610403, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.053549] env[68244]: DEBUG oslo_vmware.api [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780535, 'name': RemoveSnapshot_Task, 'duration_secs': 0.609814} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.053868] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 919.054112] env[68244]: INFO nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 14.49 seconds to snapshot the instance on the hypervisor. [ 919.207592] env[68244]: DEBUG nova.scheduler.client.report [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.220330] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780537, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.481164] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d2d4a-a605-524d-48e1-cd4baa610403, 'name': SearchDatastore_Task, 'duration_secs': 0.011019} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.481442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.481700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91232cad-54b3-45af-bb54-af268de182fa/91232cad-54b3-45af-bb54-af268de182fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.481960] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71b00e8d-8fd4-4d18-9713-8c1710593e46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.488583] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 919.488583] env[68244]: value = "task-2780538" [ 919.488583] env[68244]: _type = "Task" [ 919.488583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.496668] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780538, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.622894] env[68244]: DEBUG nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Found 3 images (rotation: 2) {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 919.622983] env[68244]: DEBUG nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Rotating out 1 backups {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 919.623142] env[68244]: DEBUG nova.compute.manager [None req-282807c3-0097-43a8-b9eb-8b6935368c5e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleting image b821da70-f5be-4fe6-8e00-4567c035cf0f {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 919.717547] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.719930] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.125s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.725170] env[68244]: INFO nova.compute.claims [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.730761] env[68244]: DEBUG oslo_vmware.api [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780537, 'name': PowerOnVM_Task, 'duration_secs': 0.617956} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.731069] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.731373] env[68244]: INFO nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Took 7.99 seconds to spawn the instance on the hypervisor. 
[ 919.732956] env[68244]: DEBUG nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 919.732956] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1e0c2c-b274-4839-946c-0d76d7f08800 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.754766] env[68244]: INFO nova.scheduler.client.report [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Deleted allocations for instance b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56 [ 920.008868] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780538, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50125} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.009271] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91232cad-54b3-45af-bb54-af268de182fa/91232cad-54b3-45af-bb54-af268de182fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.009496] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.009755] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d296d75-947a-438f-8d3f-83236b055a80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.016043] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 920.016043] env[68244]: value = "task-2780539" [ 920.016043] env[68244]: _type = "Task" [ 920.016043] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.024696] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780539, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.253346] env[68244]: INFO nova.compute.manager [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Took 46.18 seconds to build instance. [ 920.272220] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9320b519-2f57-453b-884f-629c1a58b51b tempest-ServersTestMultiNic-588657114 tempest-ServersTestMultiNic-588657114-project-member] Lock "b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.902s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.527787] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068963} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.528519] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.529403] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab4eddb-cb75-4f3a-a574-1fd1a9e98bc2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.554020] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 91232cad-54b3-45af-bb54-af268de182fa/91232cad-54b3-45af-bb54-af268de182fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.554020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58a22db7-6051-4fa3-b90a-0f968b159c63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.581760] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 920.581760] env[68244]: value = "task-2780540" [ 920.581760] env[68244]: _type = "Task" [ 920.581760] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.590742] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780540, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.757173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9473910-9777-4f77-9a99-cf0a5a97d14a tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.244s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.052626] env[68244]: DEBUG nova.compute.manager [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 921.053367] env[68244]: DEBUG nova.compute.manager [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing instance network info cache due to event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 921.053367] env[68244]: DEBUG oslo_concurrency.lockutils [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.053367] env[68244]: DEBUG oslo_concurrency.lockutils [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.053367] env[68244]: DEBUG nova.network.neutron [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.093031] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780540, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.220440] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34415aa-5251-450a-acc6-924c22cb3950 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.233620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c54348-a1f3-46d3-ae7c-22f055fc96cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.266561] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6f050d-fb0a-4c5f-85eb-cb1cd8ade6a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.275269] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539bf8e2-fe2a-48c0-97d6-59674908ce49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.279848] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.280130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.280350] env[68244]: DEBUG nova.compute.manager [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.281167] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7972aed9-0aef-4c47-ae66-bbd8b225b9e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.295424] env[68244]: DEBUG nova.compute.provider_tree [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.298941] env[68244]: DEBUG nova.compute.manager [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 921.299609] 
env[68244]: DEBUG nova.objects.instance [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.592750] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780540, 'name': ReconfigVM_Task, 'duration_secs': 0.796696} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.593032] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 91232cad-54b3-45af-bb54-af268de182fa/91232cad-54b3-45af-bb54-af268de182fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.593688] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4c03d63-89d7-4c89-a515-56d480e566e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.601427] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 921.601427] env[68244]: value = "task-2780541" [ 921.601427] env[68244]: _type = "Task" [ 921.601427] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.617590] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780541, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.802792] env[68244]: DEBUG nova.scheduler.client.report [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.819904] env[68244]: DEBUG nova.network.neutron [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updated VIF entry in instance network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.820367] env[68244]: DEBUG nova.network.neutron [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.122765] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780541, 'name': Rename_Task, 'duration_secs': 0.168348} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.122765] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.122765] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95ebf2f3-d2b6-4bba-b9b0-0118ae83feea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.129889] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 922.129889] env[68244]: value = "task-2780542" [ 922.129889] env[68244]: _type = "Task" [ 922.129889] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.138393] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.309640] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.310273] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 922.312917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.932s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.315539] env[68244]: INFO nova.compute.claims [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.318345] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.318882] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-198be1e2-51de-4d06-bd56-40599fe4f377 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.323108] env[68244]: DEBUG oslo_concurrency.lockutils [req-7af3464f-38b2-461c-be94-b69ca9fa12c3 req-e830142a-a006-4dba-be5b-63640efa0b3d service nova] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.326159] env[68244]: DEBUG oslo_vmware.api [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 922.326159] env[68244]: value = "task-2780543" [ 922.326159] env[68244]: _type = "Task" [ 922.326159] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.335142] env[68244]: DEBUG oslo_vmware.api [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.639650] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780542, 'name': PowerOnVM_Task} progress is 76%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.820640] env[68244]: DEBUG nova.compute.utils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.822014] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.825399] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.839355] env[68244]: DEBUG oslo_vmware.api [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780543, 'name': PowerOffVM_Task, 'duration_secs': 0.289681} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.842542] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.842542] env[68244]: DEBUG nova.compute.manager [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.842542] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3221f5-cb1e-4f39-928d-4466506cca5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.875296] env[68244]: DEBUG nova.policy [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03702d95a6b04249beb0e4178ef5c747', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd48f74a8554407593bb2c69b3191d85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 923.142569] env[68244]: DEBUG oslo_vmware.api [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780542, 'name': PowerOnVM_Task, 'duration_secs': 0.748383} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.142830] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.144068] env[68244]: INFO nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Took 8.81 seconds to spawn the instance on the hypervisor. 
[ 923.144068] env[68244]: DEBUG nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.144068] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ec0cf3-40e4-4648-aff7-13bb48457dd9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.158659] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Successfully created port: abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.334179] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 923.359273] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d15e9991-b45f-4693-b1c3-31ae74b6e2b1 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.079s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.663035] env[68244]: INFO nova.compute.manager [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Took 46.30 seconds to build instance. 
[ 923.854292] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14275ca-d539-4f6b-8c33-21183849cfb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.867399] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaa5563-b01e-4048-bfff-52e3a401c623 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.905977] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1701920b-d641-451e-bb70-03abfce5972a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.916167] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e88dcb4-b151-4d3b-a03d-9111cc3f1c85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.934846] env[68244]: DEBUG nova.compute.provider_tree [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.165719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f7b18608-1a62-4824-8b25-fd2185d38995 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.407s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.259622] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 924.260578] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b4ae94-1cf7-4c1c-a719-ec6c2c3dcbd8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.268458] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 924.268672] env[68244]: ERROR oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk due to incomplete transfer. 
[ 924.268906] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bf5cefab-932d-4498-a086-8f1940e0294f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.277700] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f9f1da-e89d-025e-0d1c-fd6786cc8750/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 924.277700] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Uploaded image 65d86da3-59fb-4ec7-873b-2525143225e1 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 924.280132] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 924.280132] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4c595c2e-dbba-4985-9f68-e04f65daecab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.287040] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 924.287040] env[68244]: value = "task-2780544" [ 924.287040] env[68244]: _type = "Task" [ 924.287040] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.298184] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780544, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.344637] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 924.366427] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.366691] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.366849] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.367050] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.367212] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.367361] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.367600] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.367732] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 924.367899] env[68244]: DEBUG 
nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.368260] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.368479] env[68244]: DEBUG nova.virt.hardware [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.369550] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe94dc78-d64b-4558-991c-fe98adbbdfe5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.378336] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fa35ba-301a-45af-a022-13b704a19905 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.439468] env[68244]: DEBUG nova.scheduler.client.report [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.643885] env[68244]: DEBUG nova.compute.manager [req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 924.643885] env[68244]: DEBUG oslo_concurrency.lockutils [req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.643885] env[68244]: DEBUG oslo_concurrency.lockutils [req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.643885] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.643885] env[68244]: DEBUG nova.compute.manager [req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] No waiting events found dispatching network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 924.643885] env[68244]: WARNING nova.compute.manager [req-3e05ec06-f66d-4052-9717-715ff4a82c68 req-b77edc94-bb42-46cd-bb3b-53b6aa43c06f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received unexpected event network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 for instance with vm_state building and task_state spawning. [ 924.737497] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Successfully updated port: abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.800831] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780544, 'name': Destroy_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.946185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.946727] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 924.949589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.227s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.949867] env[68244]: DEBUG nova.objects.instance [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lazy-loading 'resources' on Instance uuid df4674a2-87de-4507-950a-5941fae93aab {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.083886] env[68244]: DEBUG nova.compute.manager [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Stashing vm_state: stopped {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 925.240049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.240219] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.240403] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.304650] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780544, 'name': Destroy_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.456717] env[68244]: DEBUG nova.compute.utils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 925.460116] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 925.460668] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.541605] env[68244]: DEBUG nova.policy [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af241607fcc641edb739e1f751073cd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'deb6252f3b22465f91a37edc798f56c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 925.608111] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.802024] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780544, 'name': Destroy_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.806623] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.919020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5d49a7-a14a-41f3-a1f2-22575975ed84 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.927655] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14dedb6-3595-4e95-94f7-4f4216dfde8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.961631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02055409-4201-4771-b722-1493ee8a64fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.966634] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 925.974739] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066e63ec-a04f-47d3-b5db-577e3a242c2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.989763] env[68244]: DEBUG nova.compute.provider_tree [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.077166] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "91d45b22-7963-4615-8455-7d910a9a0fed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.077466] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.138707] env[68244]: DEBUG nova.network.neutron [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.221786] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Successfully created port: 81426ff5-2c94-4f83-8304-2344215aa381 
{{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.303893] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780544, 'name': Destroy_Task, 'duration_secs': 1.881228} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.304189] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Destroyed the VM [ 926.304435] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 926.304697] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fc27e12a-7ca6-46aa-9c36-8126360fe2e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.311878] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 926.311878] env[68244]: value = "task-2780545" [ 926.311878] env[68244]: _type = "Task" [ 926.311878] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.320810] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780545, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.492686] env[68244]: DEBUG nova.scheduler.client.report [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.540939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "91232cad-54b3-45af-bb54-af268de182fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.540939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.540939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "91232cad-54b3-45af-bb54-af268de182fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.541250] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.541706] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.544800] env[68244]: INFO nova.compute.manager [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Terminating instance [ 926.580829] env[68244]: DEBUG nova.compute.manager [None 
req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.642086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.642685] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance network_info: |[{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.643165] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:38:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba4f6497-e2b4-43b5-9819-6927865ae974', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abbd3e34-9461-4503-86ee-598fe02a65d3', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.652753] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating folder: Project (fd48f74a8554407593bb2c69b3191d85). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.653368] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2037d22-ecf3-4d53-8042-cbebf3e01a0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.665466] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created folder: Project (fd48f74a8554407593bb2c69b3191d85) in parent group-v558876. [ 926.665689] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating folder: Instances. Parent ref: group-v559043. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.665993] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c963b72-fd74-4852-96e7-ece4e42fe1b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.675114] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created folder: Instances in parent group-v559043. [ 926.675383] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.675590] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.675804] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-692fba19-0625-4f7d-9a4b-916264678afc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.702373] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.702373] env[68244]: value = "task-2780548" [ 926.702373] env[68244]: _type = "Task" [ 926.702373] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.711034] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780548, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.717461] env[68244]: DEBUG nova.compute.manager [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 926.719070] env[68244]: DEBUG nova.compute.manager [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing instance network info cache due to event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 926.719070] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.719070] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.719070] env[68244]: DEBUG nova.network.neutron [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.829670] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780545, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.976736] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 927.000614] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.004318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.394s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.010143] env[68244]: INFO nova.compute.claims [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.015725] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 927.016011] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.016199] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 927.016421] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.016570] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 927.016716] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 927.016938] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 927.017364] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 927.017694] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 927.018246] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 927.018576] env[68244]: DEBUG nova.virt.hardware [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 927.020294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed4563b-8343-4639-bbf1-b708e475203f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.030332] env[68244]: INFO nova.scheduler.client.report [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Deleted allocations for instance df4674a2-87de-4507-950a-5941fae93aab [ 927.033530] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dfc648-201c-48ba-98df-c3a58a3a6735 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.057876] env[68244]: DEBUG nova.compute.manager [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.057876] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.058811] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd321dde-0168-44eb-931a-ccbbd16b0b3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.067472] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.067816] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8724f59d-762a-49cf-93f9-d7ef18ac9b4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.076204] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 927.076204] env[68244]: value = "task-2780549" [ 927.076204] env[68244]: _type = "Task" [ 927.076204] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.088537] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.110225] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.213463] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780548, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.325459] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780545, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.547871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6b4e2f56-d1bf-4378-996e-f4979728bcc4 tempest-InstanceActionsV221TestJSON-1336723613 tempest-InstanceActionsV221TestJSON-1336723613-project-member] Lock "df4674a2-87de-4507-950a-5941fae93aab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.634s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.584149] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780549, 'name': PowerOffVM_Task, 'duration_secs': 0.267256} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.584469] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.584641] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.584902] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f46005d4-953b-4ae2-b2e7-8e9c20952c80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.588431] env[68244]: DEBUG nova.network.neutron [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updated VIF entry in instance network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.588431] env[68244]: DEBUG nova.network.neutron [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.654701] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.655021] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.655344] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleting the datastore file [datastore2] 91232cad-54b3-45af-bb54-af268de182fa {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.655652] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e478473e-0f1b-4df2-a6ad-2e4b1272b6e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.662652] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 927.662652] env[68244]: value = "task-2780551" [ 927.662652] env[68244]: _type = "Task" [ 927.662652] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.675672] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780551, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.710723] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780548, 'name': CreateVM_Task, 'duration_secs': 0.584383} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.710921] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.711655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.711803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.712200] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.712489] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9085ac9-845b-49a5-905e-c9c25c782e78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.717704] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 927.717704] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292d206-abe9-4e03-471f-609d5d3c2931" [ 927.717704] env[68244]: _type = "Task" [ 927.717704] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.727360] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292d206-abe9-4e03-471f-609d5d3c2931, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.830072] env[68244]: DEBUG oslo_vmware.api [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780545, 'name': RemoveSnapshot_Task, 'duration_secs': 1.472558} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.830072] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 927.830072] env[68244]: INFO nova.compute.manager [None req-8376bb10-9f40-4290-a928-7449ca5a9eaa tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Took 17.39 seconds to snapshot the instance on the hypervisor. [ 928.018890] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Successfully updated port: 81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.091947] env[68244]: DEBUG oslo_concurrency.lockutils [req-8e946798-7efd-4504-b96a-3a3ad3fa5a59 req-b073b59b-bca8-47aa-843a-c8a830d3aa54 service nova] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.179383] env[68244]: DEBUG oslo_vmware.api [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303497} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.179383] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.179383] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.179383] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.179383] env[68244]: INFO nova.compute.manager [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 928.179383] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.179383] env[68244]: DEBUG nova.compute.manager [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.179383] env[68244]: DEBUG nova.network.neutron [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.235526] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292d206-abe9-4e03-471f-609d5d3c2931, 'name': SearchDatastore_Task, 'duration_secs': 0.021732} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.235844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.236097] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.236338] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.236486] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.236663] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.236931] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2f82566-6c4c-49e5-b536-065245a618d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.250471] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.250678] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.251439] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d71ccce-6d6f-4d9d-ab7b-05f14f808499 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.256480] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 928.256480] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527af4ac-bc45-45fd-a781-ba1730953cff" [ 928.256480] env[68244]: _type = "Task" [ 928.256480] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.267779] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527af4ac-bc45-45fd-a781-ba1730953cff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.522925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.523185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquired lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.523351] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.607725] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c95ed0-542d-4502-b480-cc11a9e4cb5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.615938] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fe73e1-89dc-4588-9b5d-993e64828854 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.646379] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703ed76d-619e-49c6-85f4-e587d71aebd4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.654443] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805d6820-d64f-4491-9ac0-05a30b36714a {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.668315] env[68244]: DEBUG nova.compute.provider_tree [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.767601] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527af4ac-bc45-45fd-a781-ba1730953cff, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.768402] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa76d684-ca27-4788-bee4-6cf354c9bea9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.773654] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 928.773654] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f0f6-7d0d-cec2-ee05-1f309e87bc79" [ 928.773654] env[68244]: _type = "Task" [ 928.773654] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.783607] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f0f6-7d0d-cec2-ee05-1f309e87bc79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.797595] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Received event network-vif-plugged-81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 928.797810] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Acquiring lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.798038] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.798274] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.798373] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] No waiting events found dispatching network-vif-plugged-81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.798527] env[68244]: WARNING nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Received unexpected event network-vif-plugged-81426ff5-2c94-4f83-8304-2344215aa381 for instance with vm_state building and task_state spawning. [ 928.798740] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Received event network-changed-81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 928.798822] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Refreshing instance network info cache due to event network-changed-81426ff5-2c94-4f83-8304-2344215aa381. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 928.799014] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Acquiring lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.945246] env[68244]: DEBUG nova.network.neutron [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.069967] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.172384] env[68244]: DEBUG nova.scheduler.client.report [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.206720] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.206957] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.244858] env[68244]: DEBUG nova.network.neutron [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Updating instance_info_cache with network_info: [{"id": "81426ff5-2c94-4f83-8304-2344215aa381", "address": "fa:16:3e:f9:cd:08", "network": {"id": "f27dbb80-fbdc-4fc7-b984-fc0ced450363", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1300096041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "deb6252f3b22465f91a37edc798f56c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81426ff5-2c", "ovs_interfaceid": "81426ff5-2c94-4f83-8304-2344215aa381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.285553] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f0f6-7d0d-cec2-ee05-1f309e87bc79, 'name': SearchDatastore_Task, 'duration_secs': 0.010225} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.287096] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.287372] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.287642] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee9ef9e4-178e-4692-895a-f2b90c851173 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.295462] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 929.295462] env[68244]: value = "task-2780552" [ 929.295462] env[68244]: _type = "Task" [ 929.295462] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.310364] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.448100] env[68244]: INFO nova.compute.manager [-] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Took 1.27 seconds to deallocate network for instance. 
[ 929.676358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.676985] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 929.680427] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.662s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.682607] env[68244]: INFO nova.compute.claims [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.712017] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.747372] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Releasing lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.747776] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Instance network_info: |[{"id": "81426ff5-2c94-4f83-8304-2344215aa381", "address": "fa:16:3e:f9:cd:08", "network": {"id": "f27dbb80-fbdc-4fc7-b984-fc0ced450363", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1300096041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "deb6252f3b22465f91a37edc798f56c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81426ff5-2c", "ovs_interfaceid": "81426ff5-2c94-4f83-8304-2344215aa381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 929.748124] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Acquired lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.748343] env[68244]: DEBUG nova.network.neutron [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Refreshing network info cache for port 81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.751232] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:cd:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81426ff5-2c94-4f83-8304-2344215aa381', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.758679] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 
tempest-ServerPasswordTestJSON-1383228980-project-member] Creating folder: Project (deb6252f3b22465f91a37edc798f56c4). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.762109] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f45cfd8e-adca-4d36-b020-72e2e2481d07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.772943] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Created folder: Project (deb6252f3b22465f91a37edc798f56c4) in parent group-v558876. [ 929.773165] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Creating folder: Instances. Parent ref: group-v559046. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.773433] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20d06590-d27d-418c-a7b9-23e6277eee51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.783202] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Created folder: Instances in parent group-v559046. [ 929.783462] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.783650] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.783858] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b82b9bac-20a5-4979-acf8-bafae40ef7ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.812676] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780552, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468732} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.814056] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 929.814190] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 929.814956] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.814956] env[68244]: value = "task-2780555" [ 929.814956] env[68244]: _type = "Task" [ 929.814956] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.814956] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15815bd8-fbe6-46f3-86bc-d223dce61743 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.826346] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780555, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.827667] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 929.827667] env[68244]: value = "task-2780556" [ 929.827667] env[68244]: _type = "Task" [ 929.827667] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.835097] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.956685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.052158] env[68244]: DEBUG nova.network.neutron [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Updated VIF entry in instance network info cache for port 81426ff5-2c94-4f83-8304-2344215aa381. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.052511] env[68244]: DEBUG nova.network.neutron [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Updating instance_info_cache with network_info: [{"id": "81426ff5-2c94-4f83-8304-2344215aa381", "address": "fa:16:3e:f9:cd:08", "network": {"id": "f27dbb80-fbdc-4fc7-b984-fc0ced450363", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1300096041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "deb6252f3b22465f91a37edc798f56c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81426ff5-2c", "ovs_interfaceid": "81426ff5-2c94-4f83-8304-2344215aa381", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.188351] env[68244]: DEBUG nova.compute.utils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 930.192107] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 930.192107] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.231763] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.243120] env[68244]: DEBUG nova.policy [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcb360676a0b4898a283980e7839c68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15f251056bf64f719c7094479b569f0d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 930.326408] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780555, 'name': CreateVM_Task, 'duration_secs': 0.341647} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.326462] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.327153] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.327316] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.327626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 930.327874] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2157cbd6-69c9-4137-9f46-aae662752c34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
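The entries just above repeat the image-cache locking sequence seen earlier for instance ed5b8ba3: after CreateVM_Task completes, the spawn acquires a lock named after the cached VMDK ("[datastore2] devstack-image-cache_base/9aa0b4d1-...") before running SearchDatastore_Task and any copy/extend steps, and lockutils logs how long it waited for and held the lock. The sketch below illustrates that per-image serialization; the real code uses oslo_concurrency.lockutils, but a plain threading.Lock stands in here to keep the example self-contained, and the printed "waited"/"held" timings only mimic the lockutils log format.

    # Hedged sketch of the per-image cache lock pattern from the entries above.
    # One lock per cached image path serializes concurrent spawns that share the
    # same base VMDK; nova uses oslo_concurrency.lockutils, threading.Lock is a stand-in.
    import threading
    import time
    from collections import defaultdict

    _image_locks = defaultdict(threading.Lock)   # lock per "[datastore2] devstack-image-cache_base/..." path

    def with_image_lock(image_path, work):
        lock = _image_locks[image_path]
        t0 = time.monotonic()
        with lock:                                # blocks until a concurrent spawn releases it
            waited = time.monotonic() - t0
            print(f'Lock "{image_path}" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                return work()                     # e.g. the SearchDatastore / CopyVirtualDisk steps
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{image_path}" released :: held {held:.3f}s')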
[ 930.335408] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 930.335408] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f0941b-96db-af99-3d04-6f7fdc93092c" [ 930.335408] env[68244]: _type = "Task" [ 930.335408] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.338440] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069053} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.341271] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.342021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41f1bfb-240d-428b-806f-f405069da9cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.350034] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f0941b-96db-af99-3d04-6f7fdc93092c, 'name': SearchDatastore_Task, 'duration_secs': 0.010314} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.359123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.359360] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.359588] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.359727] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.359900] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.368653] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.369098] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b93b0b1d-915c-4d52-8116-147bf6a2f4b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.370749] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90a4064d-d41b-4593-b109-99624a153ded {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.391552] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 930.391552] env[68244]: value = "task-2780557" [ 930.391552] env[68244]: _type = "Task" [ 930.391552] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.392737] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.392906] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.396214] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bba031f-10db-4854-8931-da03b8dfc1f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.403097] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780557, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.404344] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 930.404344] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52458402-624c-0aa3-8191-86a019301bbb" [ 930.404344] env[68244]: _type = "Task" [ 930.404344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.412686] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52458402-624c-0aa3-8191-86a019301bbb, 'name': SearchDatastore_Task, 'duration_secs': 0.009249} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.413690] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9fdd273-c87f-4ef2-97ec-129911fb6b4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.418277] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 930.418277] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f24c-99cc-4fd3-90fe-8215340b9312" [ 930.418277] env[68244]: _type = "Task" [ 930.418277] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.425377] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f24c-99cc-4fd3-90fe-8215340b9312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.506114] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Successfully created port: c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 930.555718] env[68244]: DEBUG oslo_concurrency.lockutils [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] Releasing lock "refresh_cache-b50ed409-296a-4b6d-81d2-f8cfc24de24e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.555718] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Received event network-vif-deleted-7c817571-afec-4248-8b5f-5a008e7f6141 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 930.555946] env[68244]: INFO nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Neutron deleted interface 7c817571-afec-4248-8b5f-5a008e7f6141; detaching it from the instance and deleting it from the info cache [ 930.555946] env[68244]: DEBUG nova.network.neutron [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.693043] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 930.903391] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780557, 'name': ReconfigVM_Task, 'duration_secs': 0.267542} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.905990] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Reconfigured VM instance instance-0000003e to attach disk [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.906705] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a113d514-4ced-4ecd-8a53-2bef61218c11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.913320] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 930.913320] env[68244]: value = "task-2780558" [ 930.913320] env[68244]: _type = "Task" [ 930.913320] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.926407] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780558, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.931758] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5217f24c-99cc-4fd3-90fe-8215340b9312, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.932016] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.932420] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b50ed409-296a-4b6d-81d2-f8cfc24de24e/b50ed409-296a-4b6d-81d2-f8cfc24de24e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.932691] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9edcb022-f51c-4c7f-a3e2-8be62209cdfd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.942535] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 930.942535] env[68244]: value = "task-2780559" [ 930.942535] env[68244]: _type = "Task" [ 930.942535] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.950309] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780559, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.061261] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e140d699-dc0e-433a-9f83-4c195c497b1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.070941] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2cd59d-49d5-4209-b5ac-fb9f62f38b61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.113989] env[68244]: DEBUG nova.compute.manager [req-e9378c8a-a1df-45e2-8f6c-e070f008a18e req-26e423b7-09df-49a0-b609-680a55e64cda service nova] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Detach interface failed, port_id=7c817571-afec-4248-8b5f-5a008e7f6141, reason: Instance 91232cad-54b3-45af-bb54-af268de182fa could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 931.204713] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e60281-ca9d-4b9c-8407-cdd1c2ded95d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.212913] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc311cc1-3f73-4f1b-8193-60d2cf3162ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.246469] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42e077a-0b2c-4197-a088-cb335e76b898 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.258314] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fec6089-5ebb-4974-b5d2-03eb33a5c13c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.273522] env[68244]: DEBUG nova.compute.provider_tree [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.422208] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780558, 'name': Rename_Task, 'duration_secs': 0.142385} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.422495] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.422742] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-053f8e53-051e-40b7-9f90-af078382b3b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.429356] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 931.429356] env[68244]: value = "task-2780560" [ 931.429356] env[68244]: _type = "Task" [ 931.429356] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.436815] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780560, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.449651] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780559, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480082} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.449872] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b50ed409-296a-4b6d-81d2-f8cfc24de24e/b50ed409-296a-4b6d-81d2-f8cfc24de24e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.450093] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.450326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-938ff0ab-f78e-4190-a1e7-4e489182d2ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.456952] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 931.456952] env[68244]: value = "task-2780561" [ 931.456952] env[68244]: _type = "Task" [ 931.456952] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.464070] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780561, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.710125] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 931.735224] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 931.735502] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.736075] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 931.736075] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.736075] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 931.736219] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 931.736373] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 931.736569] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 931.736749] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 
tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 931.736912] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 931.737097] env[68244]: DEBUG nova.virt.hardware [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 931.737975] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d0f609-b323-4975-9a50-5a2030b4df67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.745843] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7def03-0407-4983-8de6-df060f44e6ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.777653] env[68244]: DEBUG nova.scheduler.client.report [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.940486] env[68244]: DEBUG oslo_vmware.api [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780560, 'name': PowerOnVM_Task, 'duration_secs': 0.476065} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.940742] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.940876] env[68244]: INFO nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Took 7.60 seconds to spawn the instance on the hypervisor. 
[ 931.941023] env[68244]: DEBUG nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.941794] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c89e16-997d-447d-9233-0715e44fea44 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.966192] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06914} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.966453] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.967345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568cb58e-33c8-4e19-8191-6fb4b90ea4ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.973600] env[68244]: DEBUG nova.compute.manager [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Received event network-vif-plugged-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 931.973729] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] Acquiring lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.973958] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.974184] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.974352] env[68244]: DEBUG nova.compute.manager [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] No waiting events found dispatching network-vif-plugged-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 931.974515] env[68244]: WARNING nova.compute.manager [req-3f2c2510-018b-4e7e-835b-8ecaeccc977b req-b3cb3aa4-de0b-4b65-ba4e-ae5b0115e438 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Received unexpected event network-vif-plugged-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 for instance with vm_state building and task_state spawning. [ 931.997084] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] b50ed409-296a-4b6d-81d2-f8cfc24de24e/b50ed409-296a-4b6d-81d2-f8cfc24de24e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.998306] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abcf0e28-49c6-477b-ab91-6777b574e6b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.018091] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 932.018091] env[68244]: value = "task-2780562" [ 932.018091] env[68244]: _type = "Task" [ 932.018091] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.026534] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780562, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.028229] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Successfully updated port: c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.285068] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.285641] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 932.288323] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.811s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.290147] env[68244]: INFO nova.compute.claims [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.459926] env[68244]: INFO nova.compute.manager [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Took 49.89 seconds to build instance. [ 932.528440] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780562, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.530060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.530201] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.530346] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.797301] env[68244]: DEBUG nova.compute.utils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 932.798596] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 932.798771] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.835785] env[68244]: DEBUG nova.policy [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ffc62f94ec3404f9b8684b5ea8fd0fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd259eb5849ec490fb550e2763d4df2a6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.962071] env[68244]: DEBUG oslo_concurrency.lockutils [None req-297d6724-d544-4723-8f1c-b267dfd1d3ad tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.672s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.029341] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780562, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.277840] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.305213] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 933.399046] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Successfully created port: 2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.508828] env[68244]: DEBUG nova.network.neutron [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updating instance_info_cache with network_info: [{"id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "address": "fa:16:3e:4d:e6:fa", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cbfafe-e9", "ovs_interfaceid": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.532896] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780562, 'name': ReconfigVM_Task, 'duration_secs': 1.123653} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.533345] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Reconfigured VM instance instance-0000003f to attach disk [datastore2] b50ed409-296a-4b6d-81d2-f8cfc24de24e/b50ed409-296a-4b6d-81d2-f8cfc24de24e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.535963] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f308eb6-5e70-46e3-94e1-45906969e6f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.543176] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 933.543176] env[68244]: value = "task-2780563" [ 933.543176] env[68244]: _type = "Task" [ 933.543176] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.554669] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780563, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.743582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee727be-ceb0-445b-8e22-45e194e394a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.753019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95a899c-3e61-4de2-a696-f25b4e2174dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.784462] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f61491d-ef0b-4491-afce-929d0da52aa2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.792559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2202ac-6909-4e8b-9748-ac1e7ea5f3d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.806707] env[68244]: DEBUG nova.compute.provider_tree [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.847901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.848183] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.998725] env[68244]: DEBUG nova.compute.manager [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Received event network-changed-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 933.998900] env[68244]: DEBUG nova.compute.manager [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Refreshing instance network info cache due to event network-changed-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 933.999156] env[68244]: DEBUG oslo_concurrency.lockutils [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] Acquiring lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.013639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.013953] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Instance network_info: |[{"id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "address": "fa:16:3e:4d:e6:fa", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cbfafe-e9", "ovs_interfaceid": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
934.014247] env[68244]: DEBUG oslo_concurrency.lockutils [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] Acquired lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.014426] env[68244]: DEBUG nova.network.neutron [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Refreshing network info cache for port c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.015612] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:e6:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.023112] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 934.024208] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.024498] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2501cf17-db23-4bda-8f7a-ceaaa5be4fff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.050259] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.050259] env[68244]: value = "task-2780564" [ 934.050259] env[68244]: _type = "Task" [ 934.050259] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.056733] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780563, 'name': Rename_Task, 'duration_secs': 0.235975} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.057405] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.057681] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fb1ea82-8f4b-4f6b-99ee-7dc473a128a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.062367] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780564, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.066774] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 934.066774] env[68244]: value = "task-2780565" [ 934.066774] env[68244]: _type = "Task" [ 934.066774] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.074324] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780565, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.312654] env[68244]: DEBUG nova.scheduler.client.report [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.317568] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 934.347927] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.348237] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.348405] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.348610] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.348762] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.348922] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.349191] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.349456] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 934.349572] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.349755] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.349937] env[68244]: DEBUG nova.virt.hardware [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.350664] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.353895] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3083ab72-bff5-4c2e-a638-4dcdd673f766 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.362269] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c6ebb4-7640-4bd6-8e14-b78dc5e5183b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.562025] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780564, 'name': CreateVM_Task, 'duration_secs': 0.311795} completed successfully. 
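The nova.virt.hardware entries above (flavor m1.nano with 1 vCPU, all limits and preferences 0:0:0, i.e. unconstrained) collapse to a single possible topology of 1 socket x 1 core x 1 thread. The enumeration can be re-derived generically by listing factorizations of the vCPU count that respect the socket/core/thread maxima; this is an illustrative sketch, not Nova's own implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log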
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.563855] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 934.564558] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.564714] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.565042] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 934.565634] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9542ed42-eb6d-487c-8dde-a0b8502cd7ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.574233] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 934.574233] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5207e007-48a0-17e6-c385-cc49ab413a1e" [ 934.574233] env[68244]: _type = "Task" [ 934.574233] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.577690] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780565, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.586131] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5207e007-48a0-17e6-c385-cc49ab413a1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.785339] env[68244]: DEBUG nova.network.neutron [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updated VIF entry in instance network info cache for port c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 934.785867] env[68244]: DEBUG nova.network.neutron [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updating instance_info_cache with network_info: [{"id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "address": "fa:16:3e:4d:e6:fa", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cbfafe-e9", "ovs_interfaceid": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.821260] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.821788] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Start building networks asynchronously for instance. 
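The instance_info_cache payload above is the serialized network_info model: one entry per VIF, carrying the Neutron port id, MAC address, subnet/IP data and backend details that later feed the VIF info handed to the VMware driver. A small sketch of pulling the commonly used fields out of such a structure (the sample dict is trimmed from the entry logged above):

    # Trimmed copy of the cache entry above; only illustrative fields kept.
    network_info = [{
        "id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6",
        "address": "fa:16:3e:4d:e6:fa",
        "network": {
            "id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.5", "type": "fixed"}],
            }],
        },
        "details": {"nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168"},
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips)
        # -> c7cbfafe-... fa:16:3e:4d:e6:fa ['192.168.128.5']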
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 934.827513] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.128s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.882215] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.958945] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Successfully updated port: 2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.077351] env[68244]: DEBUG oslo_vmware.api [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780565, 'name': PowerOnVM_Task, 'duration_secs': 0.534437} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.077724] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.077924] env[68244]: INFO nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Took 8.10 seconds to spawn the instance on the hypervisor. [ 935.078182] env[68244]: DEBUG nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.081864] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdc541c-7cfd-44be-bc3d-4534a8782ee8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.089106] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5207e007-48a0-17e6-c385-cc49ab413a1e, 'name': SearchDatastore_Task, 'duration_secs': 0.014315} completed successfully. 
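The "compute_resources" acquire/release entries above (waited 33.128s, held 2.533s) come from oslo.concurrency's lockutils, which the resource tracker uses to serialize instance and resize claims on a host. The basic pattern, with a placeholder critical section:

    from oslo_concurrency import lockutils

    # Context-manager form: lockutils emits the Acquiring/Acquired/released
    # DEBUG lines seen above around this block.
    with lockutils.lock("compute_resources"):
        pass  # placeholder for claim bookkeeping

    # Equivalent decorator form.
    @lockutils.synchronized("compute_resources")
    def make_claim():
        pass  # placeholder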
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.090613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.090843] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.091084] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.091247] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.091413] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.094172] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d36973c3-e4e3-45ac-b32f-bd90866c0afe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.102854] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.103056] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.103805] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b94461-f439-40f1-b569-d7d9509408da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.111941] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 935.111941] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236bfee-3a7e-0125-d192-1b9f0fc9272f" [ 935.111941] env[68244]: _type = "Task" [ 935.111941] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.119421] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236bfee-3a7e-0125-d192-1b9f0fc9272f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.289212] env[68244]: DEBUG oslo_concurrency.lockutils [req-b4a9ca22-3de4-4e4e-bc0f-d009c0f294e8 req-d92756c6-7467-4aca-b9b4-ae9d2bddf6d8 service nova] Releasing lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.332040] env[68244]: DEBUG nova.compute.utils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.336175] env[68244]: INFO nova.compute.claims [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.340409] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 935.340790] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.381532] env[68244]: DEBUG nova.policy [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.461121] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.461307] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquired lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.461430] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.608157] env[68244]: INFO nova.compute.manager [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Took 49.25 seconds to build instance. [ 935.623219] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5236bfee-3a7e-0125-d192-1b9f0fc9272f, 'name': SearchDatastore_Task, 'duration_secs': 0.009177} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.624089] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5690cad7-9794-4331-8dab-adcfc48a01d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.629891] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 935.629891] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527fa77f-063a-63c7-94e2-61b4ff0a5731" [ 935.629891] env[68244]: _type = "Task" [ 935.629891] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.638414] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527fa77f-063a-63c7-94e2-61b4ff0a5731, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.644462] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Successfully created port: dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.842168] env[68244]: INFO nova.compute.resource_tracker [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating resource usage from migration 2764fb14-ee76-4821-a9aa-cb31716b24d6 [ 935.845071] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 935.998776] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.028246] env[68244]: DEBUG nova.compute.manager [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Received event network-vif-plugged-2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 936.028480] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Acquiring lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.029438] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.029438] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.029438] env[68244]: DEBUG nova.compute.manager [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] No waiting events found dispatching network-vif-plugged-2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 936.029438] env[68244]: WARNING nova.compute.manager [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Received unexpected event network-vif-plugged-2c0febdf-3f54-4d82-8373-cfc91569d784 for instance with vm_state building and task_state spawning. [ 936.029438] env[68244]: DEBUG nova.compute.manager [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Received event network-changed-2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 936.029699] env[68244]: DEBUG nova.compute.manager [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Refreshing instance network info cache due to event network-changed-2c0febdf-3f54-4d82-8373-cfc91569d784. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 936.029699] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Acquiring lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.111000] env[68244]: DEBUG oslo_concurrency.lockutils [None req-14d52be6-553d-484e-a7d1-7c39d7a5fa7e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.106s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.148686] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527fa77f-063a-63c7-94e2-61b4ff0a5731, 'name': SearchDatastore_Task, 'duration_secs': 0.036014} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.148933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.149192] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 183ac01e-82b1-470e-9e8f-a8aefb4c64c3/183ac01e-82b1-470e-9e8f-a8aefb4c64c3.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.149827] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35d97326-77bd-488e-b596-302a35b052bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.156394] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 936.156394] env[68244]: value = "task-2780566" [ 936.156394] env[68244]: _type = "Task" [ 936.156394] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.164108] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780566, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.171610] env[68244]: DEBUG nova.network.neutron [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updating instance_info_cache with network_info: [{"id": "2c0febdf-3f54-4d82-8373-cfc91569d784", "address": "fa:16:3e:f4:01:2d", "network": {"id": "a4bf516c-928d-40ce-8b22-0a96481b47b4", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-544316534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d259eb5849ec490fb550e2763d4df2a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c0febdf-3f", "ovs_interfaceid": "2c0febdf-3f54-4d82-8373-cfc91569d784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.321415] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67955f64-15fa-4f87-9eea-7ad380cec5a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.331063] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47fc691-73af-45bc-aa16-db83ea98b104 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.365383] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3152d674-5511-4194-82d7-dcda89ae3dc5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.373844] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1016d92b-dae9-4adb-8b19-41ae2f719a6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.389320] env[68244]: DEBUG nova.compute.provider_tree [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.612155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.612491] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.612816] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.612925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.613077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.615866] env[68244]: INFO nova.compute.manager [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Terminating instance [ 936.666214] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491913} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.666475] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 183ac01e-82b1-470e-9e8f-a8aefb4c64c3/183ac01e-82b1-470e-9e8f-a8aefb4c64c3.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.666687] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.666936] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6905a840-eb79-415e-988b-b92556dd8380 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.672937] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 936.672937] env[68244]: value = "task-2780567" [ 936.672937] env[68244]: _type = "Task" [ 936.672937] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.673402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Releasing lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.673690] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Instance network_info: |[{"id": "2c0febdf-3f54-4d82-8373-cfc91569d784", "address": "fa:16:3e:f4:01:2d", "network": {"id": "a4bf516c-928d-40ce-8b22-0a96481b47b4", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-544316534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d259eb5849ec490fb550e2763d4df2a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c0febdf-3f", "ovs_interfaceid": "2c0febdf-3f54-4d82-8373-cfc91569d784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 936.676533] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Acquired lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.676715] env[68244]: DEBUG nova.network.neutron [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Refreshing network info cache for port 2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.677819] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:01:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c0febdf-3f54-4d82-8373-cfc91569d784', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.685664] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Creating folder: Project (d259eb5849ec490fb550e2763d4df2a6). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.686716] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f26e0661-cda3-4939-930a-45175e9af8e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.693479] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.697209] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Created folder: Project (d259eb5849ec490fb550e2763d4df2a6) in parent group-v558876. [ 936.697391] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Creating folder: Instances. Parent ref: group-v559050. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 936.697848] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3959fd01-b64f-4d5a-b048-1cfc1a22fff4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.707175] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Created folder: Instances in parent group-v559050. [ 936.707420] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.707615] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.707811] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2beda2c1-530c-4acc-9eee-5ea78afb422a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.727346] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.727346] env[68244]: value = "task-2780570" [ 936.727346] env[68244]: _type = "Task" [ 936.727346] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.736620] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780570, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.869815] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 936.893125] env[68244]: DEBUG nova.scheduler.client.report [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.898843] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.898843] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.899106] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.899402] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.899402] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.899518] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.899729] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 
tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.899893] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.900082] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.900275] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.900584] env[68244]: DEBUG nova.virt.hardware [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.901478] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30568a47-b493-451a-aea3-d558046eab80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.909689] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be535406-fad4-443c-b923-871affe2f72f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.099318] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Successfully updated port: dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.120831] env[68244]: DEBUG nova.compute.manager [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 937.121169] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.122239] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbd9964-63cf-4d9d-8b0a-b9f0e7d9d378 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.130596] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.130834] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b46c51f-09b5-49d1-9760-a18aa0af4441 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.137982] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 937.137982] env[68244]: value = "task-2780571" [ 937.137982] env[68244]: _type = "Task" [ 937.137982] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.147418] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.182606] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.182868] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.183643] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afbb511-0ebc-4b8d-8c25-2bef7d2b460b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.206094] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 183ac01e-82b1-470e-9e8f-a8aefb4c64c3/183ac01e-82b1-470e-9e8f-a8aefb4c64c3.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.206678] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4fe4355-4bd1-4d4b-becb-fd4d9477f3a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.226385] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 937.226385] env[68244]: value = "task-2780572" [ 937.226385] env[68244]: _type = "Task" [ 937.226385] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.237645] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780572, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.240416] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780570, 'name': CreateVM_Task, 'duration_secs': 0.312699} completed successfully. 
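The SearchDatastore_Task, MakeDirectory, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries above trace the image-cache path for instance 183ac01e-82b1-470e-9e8f-a8aefb4c64c3: locate the cached VMDK under devstack-image-cache_base, copy it into the instance folder, grow it to the flavor's root disk size, then reconfigure the VM to attach it. The copy-and-extend portion, sketched as raw VIM calls through an oslo.vmware session like the one shown earlier (parameter names follow the VIM API; dc_ref and the datastore paths are placeholders, and this is a sketch rather than the driver's own helpers):

    def clone_root_disk(session, dc_ref, src, dst, new_capacity_kb):
        # src/dst are datastore paths such as
        # "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
        content = session.vim.service_content
        copy_task = session.invoke_api(
            session.vim, "CopyVirtualDisk_Task", content.virtualDiskManager,
            sourceName=src, sourceDatacenter=dc_ref,
            destName=dst, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)    # CopyVirtualDisk_Task above
        extend_task = session.invoke_api(
            session.vim, "ExtendVirtualDisk_Task", content.virtualDiskManager,
            name=dst, datacenter=dc_ref,
            newCapacityKb=new_capacity_kb, eagerZero=False)
        session.wait_for_task(extend_task)  # ExtendVirtualDisk_Task above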
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.240467] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.241526] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.241526] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.241641] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 937.243888] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c836bb7-267c-43d4-91d4-bfdd3a518d65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.248918] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 937.248918] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a047db-31e5-abe0-4013-a79ee24dd2e1" [ 937.248918] env[68244]: _type = "Task" [ 937.248918] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.257098] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a047db-31e5-abe0-4013-a79ee24dd2e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.405780] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.578s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.406059] env[68244]: INFO nova.compute.manager [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Migrating [ 937.412509] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.530s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.413899] env[68244]: INFO nova.compute.claims [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.418018] env[68244]: DEBUG nova.network.neutron [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updated VIF entry in instance network info cache for port 2c0febdf-3f54-4d82-8373-cfc91569d784. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.418372] env[68244]: DEBUG nova.network.neutron [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updating instance_info_cache with network_info: [{"id": "2c0febdf-3f54-4d82-8373-cfc91569d784", "address": "fa:16:3e:f4:01:2d", "network": {"id": "a4bf516c-928d-40ce-8b22-0a96481b47b4", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-544316534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d259eb5849ec490fb550e2763d4df2a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c0febdf-3f", "ovs_interfaceid": "2c0febdf-3f54-4d82-8373-cfc91569d784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.602093] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.602202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.602400] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.649447] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780571, 'name': PowerOffVM_Task, 'duration_secs': 0.463209} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.649738] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.649860] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.650678] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a124b3a-cdcd-452a-918b-d679c64e2e95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.736530] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.758342] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a047db-31e5-abe0-4013-a79ee24dd2e1, 'name': SearchDatastore_Task, 'duration_secs': 0.013931} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.758471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.758631] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.758859] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.759020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.759226] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.759482] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1824e8a9-b985-4c8a-a243-0739c5dbcba3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.768422] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.768597] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.769372] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee5f2f2a-665d-4a1f-a287-b1c53b9a0f34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.774655] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 937.774655] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d6fbd8-5a5b-da10-c2ee-c727ec7e55fb" [ 937.774655] env[68244]: _type = "Task" [ 937.774655] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.782843] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d6fbd8-5a5b-da10-c2ee-c727ec7e55fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.784192] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.784412] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.784569] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Deleting the datastore file [datastore2] b50ed409-296a-4b6d-81d2-f8cfc24de24e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.784806] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3f9a52d-7553-44f5-a9fd-6af867d55c02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.793027] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for the task: (returnval){ [ 937.793027] env[68244]: value = "task-2780574" [ 937.793027] env[68244]: _type = "Task" [ 937.793027] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.800712] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780574, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.926244] env[68244]: DEBUG oslo_concurrency.lockutils [req-4786098e-015b-4b5f-9b3a-06d13953dc74 req-e6a29000-fabc-454f-9559-2b8ae6d7ecea service nova] Releasing lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.928962] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.929181] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.929341] env[68244]: DEBUG nova.network.neutron [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.052324] env[68244]: DEBUG nova.compute.manager [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Received event network-vif-plugged-dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 938.052578] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Acquiring lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.052795] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.052963] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.053144] env[68244]: DEBUG nova.compute.manager [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] No waiting events found dispatching network-vif-plugged-dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 938.053383] env[68244]: WARNING 
nova.compute.manager [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Received unexpected event network-vif-plugged-dd7d02bc-40c3-4660-9ed6-536c09bae7f4 for instance with vm_state building and task_state spawning. [ 938.053473] env[68244]: DEBUG nova.compute.manager [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Received event network-changed-dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 938.053629] env[68244]: DEBUG nova.compute.manager [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Refreshing instance network info cache due to event network-changed-dd7d02bc-40c3-4660-9ed6-536c09bae7f4. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 938.053789] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Acquiring lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.133489] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.239518] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780572, 'name': ReconfigVM_Task, 'duration_secs': 0.979654} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.239860] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 183ac01e-82b1-470e-9e8f-a8aefb4c64c3/183ac01e-82b1-470e-9e8f-a8aefb4c64c3.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.240587] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccf5ec1c-b065-4576-8906-ee164249f6ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.247055] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 938.247055] env[68244]: value = "task-2780575" [ 938.247055] env[68244]: _type = "Task" [ 938.247055] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.255088] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780575, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.262635] env[68244]: DEBUG nova.network.neutron [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Updating instance_info_cache with network_info: [{"id": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "address": "fa:16:3e:2e:29:36", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7d02bc-40", "ovs_interfaceid": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.286545] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d6fbd8-5a5b-da10-c2ee-c727ec7e55fb, 'name': SearchDatastore_Task, 'duration_secs': 0.019341} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.287434] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-342c73ed-4c50-4ef4-a92e-de7ccb2719a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.293157] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 938.293157] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52593575-dcbb-2b80-4b30-b162db9fe25c" [ 938.293157] env[68244]: _type = "Task" [ 938.293157] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.306505] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.309591] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52593575-dcbb-2b80-4b30-b162db9fe25c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.623052] env[68244]: DEBUG nova.network.neutron [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.756925] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780575, 'name': Rename_Task, 'duration_secs': 0.188086} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.759290] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.759691] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0cbb561f-575b-4bc9-b577-06b210e9303c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.764724] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.765112] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Instance network_info: |[{"id": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "address": "fa:16:3e:2e:29:36", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7d02bc-40", "ovs_interfaceid": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 938.765392] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Acquired lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.765565] env[68244]: DEBUG nova.network.neutron [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Refreshing network info cache for port dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.766883] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:29:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd7d02bc-40c3-4660-9ed6-536c09bae7f4', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.774016] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.778124] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.778672] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 938.778672] env[68244]: value = "task-2780576" [ 938.778672] env[68244]: _type = "Task" [ 938.778672] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.779101] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aed8694d-378d-4d00-99e0-b08bbe29b14a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.806813] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.806813] env[68244]: value = "task-2780577" [ 938.806813] env[68244]: _type = "Task" [ 938.806813] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.812562] env[68244]: DEBUG oslo_vmware.api [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Task: {'id': task-2780574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.516874} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.813109] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780576, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.821056] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.821249] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.821434] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.821649] env[68244]: INFO nova.compute.manager [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Took 1.70 seconds to destroy the instance on the hypervisor. [ 938.821915] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.822155] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52593575-dcbb-2b80-4b30-b162db9fe25c, 'name': SearchDatastore_Task, 'duration_secs': 0.014516} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.822577] env[68244]: DEBUG nova.compute.manager [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.822678] env[68244]: DEBUG nova.network.neutron [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.824318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.824568] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] c70fb986-8396-4f11-98c4-1ed977a23bcd/c70fb986-8396-4f11-98c4-1ed977a23bcd.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 938.825137] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0581e76-6b0b-4681-a77e-68d131e32522 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.830498] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780577, 'name': CreateVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.840120] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 938.840120] env[68244]: value = "task-2780578" [ 938.840120] env[68244]: _type = "Task" [ 938.840120] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.850616] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780578, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.867881] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b731f9f-a868-4449-b85d-60276fb878c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.876379] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09d24af-7999-41bb-a37c-29ffc8f2a5ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.912308] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06511cd0-67d1-4bad-9997-60d04b2ea1fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.920212] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4c03d9-8c1e-44f2-b7e9-813abf2dabff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.935206] env[68244]: DEBUG nova.compute.provider_tree [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.125975] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.303532] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780576, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.324910] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780577, 'name': CreateVM_Task, 'duration_secs': 0.363917} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.325141] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.325943] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.326161] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.326538] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 939.326864] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40feb4f6-9618-4fc8-90bb-321b3be47ea3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.332383] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 939.332383] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52857040-8514-d98b-91ef-911a734e5e62" [ 939.332383] env[68244]: _type = "Task" [ 939.332383] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.343097] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52857040-8514-d98b-91ef-911a734e5e62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.351894] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780578, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.438562] env[68244]: DEBUG nova.scheduler.client.report [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.508472] env[68244]: DEBUG nova.network.neutron [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Updated VIF entry in instance network info cache for port dd7d02bc-40c3-4660-9ed6-536c09bae7f4. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.509568] env[68244]: DEBUG nova.network.neutron [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Updating instance_info_cache with network_info: [{"id": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "address": "fa:16:3e:2e:29:36", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd7d02bc-40", "ovs_interfaceid": "dd7d02bc-40c3-4660-9ed6-536c09bae7f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.645383] env[68244]: DEBUG nova.network.neutron [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.803141] env[68244]: DEBUG oslo_vmware.api [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780576, 'name': PowerOnVM_Task, 'duration_secs': 0.575804} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.803628] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.804010] env[68244]: INFO nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Took 8.09 seconds to spawn the instance on the hypervisor. [ 939.804330] env[68244]: DEBUG nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.805889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17d14e2-262b-462b-8707-3cf01fcf4cb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.842611] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52857040-8514-d98b-91ef-911a734e5e62, 'name': SearchDatastore_Task, 'duration_secs': 0.018829} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.843659] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.843923] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.844233] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.844402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.844590] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.847719] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02c9e7e8-ffb7-4548-83ec-c4da04bcd1fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.856636] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529305} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.856837] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] c70fb986-8396-4f11-98c4-1ed977a23bcd/c70fb986-8396-4f11-98c4-1ed977a23bcd.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.856959] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.858067] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db5bcdfd-30df-427f-994b-9bc4b9469070 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.859845] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.860024] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.861203] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a030df99-8131-4af0-8c84-2609a93e555d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.866299] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 939.866299] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525588f2-454d-001a-3a74-a4cf05529848" [ 939.866299] env[68244]: _type = "Task" [ 939.866299] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.870301] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 939.870301] env[68244]: value = "task-2780579" [ 939.870301] env[68244]: _type = "Task" [ 939.870301] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.876287] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525588f2-454d-001a-3a74-a4cf05529848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.881092] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780579, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.946160] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.946697] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 939.949422] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.449s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.012056] env[68244]: DEBUG oslo_concurrency.lockutils [req-c8c6c28b-139a-4e75-8e0b-7addd906a7af req-920f610d-b362-42a1-9076-1804620c4669 service nova] Releasing lock "refresh_cache-4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.076760] env[68244]: DEBUG nova.compute.manager [req-bb5e9ade-9989-4ca0-b874-0bf54835016b req-6a721748-55eb-483b-b17a-9dc21e243084 service nova] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Received event network-vif-deleted-81426ff5-2c94-4f83-8304-2344215aa381 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 940.147643] env[68244]: INFO nova.compute.manager [-] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Took 1.32 seconds to deallocate network for instance. 
[ 940.321699] env[68244]: INFO nova.compute.manager [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Took 46.76 seconds to build instance. [ 940.380185] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780579, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069336} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.383792] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.384130] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525588f2-454d-001a-3a74-a4cf05529848, 'name': SearchDatastore_Task, 'duration_secs': 0.010555} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.384809] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af364f9-fbca-4349-9c91-ed0d306fb07e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.387675] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51efcbc5-e580-4f65-a860-b8703f10efdf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.409218] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] c70fb986-8396-4f11-98c4-1ed977a23bcd/c70fb986-8396-4f11-98c4-1ed977a23bcd.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.410965] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a9c3c5e-70e1-4443-9ad6-2236acb0e843 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.425177] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 940.425177] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b0d816-d5d5-ce95-ddac-d9100a2065d8" [ 940.425177] env[68244]: _type = "Task" [ 940.425177] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.431025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Acquiring lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.431025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Acquired lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.431025] env[68244]: DEBUG nova.network.neutron [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.434145] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 940.434145] env[68244]: value = "task-2780580" [ 940.434145] env[68244]: _type = "Task" [ 940.434145] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.441160] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b0d816-d5d5-ce95-ddac-d9100a2065d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009639} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.442162] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.442414] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c/4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.442651] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36f0ea15-498b-4e44-815f-ddeed60f6af0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.449669] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.454208] env[68244]: DEBUG nova.compute.utils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 940.458599] env[68244]: INFO nova.compute.claims [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.463896] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 940.464189] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 940.466555] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 940.466555] env[68244]: value = "task-2780581" [ 940.466555] env[68244]: _type = "Task" [ 940.466555] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.476576] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780581, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.503611] env[68244]: DEBUG nova.policy [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '918658919a8c4d4e888f9a63053e5ffe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e80cfa81cd442f9af3bf027b9059123', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 940.643989] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e335ce-a415-4f49-8c19-80dcc724cf7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.666186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.666598] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 940.810537] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Successfully created port: 2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 940.825617] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8a26461e-d051-449f-ab44-8f0c99659755 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.276s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.948263] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780580, 'name': ReconfigVM_Task, 'duration_secs': 0.317771} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.948263] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Reconfigured VM instance instance-00000041 to attach disk [datastore1] c70fb986-8396-4f11-98c4-1ed977a23bcd/c70fb986-8396-4f11-98c4-1ed977a23bcd.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.948263] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f1f3ed7-a0ba-429c-8a95-2834c66bdfaf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.956020] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 940.956020] env[68244]: value = "task-2780582" [ 940.956020] env[68244]: _type = "Task" [ 940.956020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.965128] env[68244]: INFO nova.compute.resource_tracker [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating resource usage from migration 922aca6a-3d71-4286-8137-d883b7d4f8e4 [ 940.968518] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 940.971711] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780582, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.987921] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495719} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.991411] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c/4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.991969] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.992515] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddc2002c-7c29-41cf-bab5-5c42c57b89bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.001583] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 941.001583] env[68244]: value = "task-2780583" [ 941.001583] env[68244]: _type = "Task" [ 941.001583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.012631] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780583, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.174560] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.175065] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e745198-7f42-48d7-ae2e-475e9687eb92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.181734] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 941.181734] env[68244]: value = "task-2780584" [ 941.181734] env[68244]: _type = "Task" [ 941.181734] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.192993] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780584, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.232094] env[68244]: DEBUG nova.network.neutron [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updating instance_info_cache with network_info: [{"id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "address": "fa:16:3e:4d:e6:fa", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cbfafe-e9", "ovs_interfaceid": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.464323] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780582, 'name': Rename_Task, 'duration_secs': 0.175835} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.466910] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 941.467399] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-175021d4-0f96-4dce-a54f-5681095e1e61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.474293] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 941.474293] env[68244]: value = "task-2780585" [ 941.474293] env[68244]: _type = "Task" [ 941.474293] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.481681] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d09cff-6ada-4638-8051-51e3c3a64cfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.491039] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780585, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.492523] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e568c0-7397-4c97-bbaf-8789b814438e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.527307] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6883857c-18a2-4e92-ba57-ba987257359b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.535145] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780583, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063799} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.537206] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.537973] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01959fb8-e7fe-4b93-8e4c-cbe3753da962 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.541289] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282c3a27-2a99-4b63-8bea-88d8e6b127d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.555409] env[68244]: DEBUG nova.compute.provider_tree [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.574123] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c/4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.575837] env[68244]: DEBUG nova.scheduler.client.report [None 
req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.579053] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbdc7fb4-aca2-4c98-9f09-ca4f5a7d381e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.594625] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 1.645s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.594821] env[68244]: INFO nova.compute.manager [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Migrating [ 941.603143] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.837s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.604574] env[68244]: INFO nova.compute.claims [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.617818] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 941.617818] env[68244]: value = "task-2780586" [ 941.617818] env[68244]: _type = "Task" [ 941.617818] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.625649] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.692092] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780584, 'name': PowerOffVM_Task, 'duration_secs': 0.227901} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.692236] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.692422] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 941.734016] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Releasing lock "refresh_cache-183ac01e-82b1-470e-9e8f-a8aefb4c64c3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.734153] env[68244]: DEBUG nova.compute.manager [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Inject network info {{(pid=68244) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 941.734393] env[68244]: DEBUG nova.compute.manager [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] network_info to inject: |[{"id": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "address": "fa:16:3e:4d:e6:fa", "network": {"id": "072e6a9b-6d5d-411a-85d6-10d1e30d911e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1734151782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15f251056bf64f719c7094479b569f0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7cbfafe-e9", "ovs_interfaceid": "c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 941.739745] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Reconfiguring VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 941.739976] env[68244]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a72ef255-5835-4797-bb6c-9b43ca7a915f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.757553] env[68244]: DEBUG oslo_vmware.api [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Waiting for the task: (returnval){ [ 941.757553] env[68244]: value = "task-2780587" [ 941.757553] env[68244]: _type = "Task" [ 941.757553] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.765989] env[68244]: DEBUG oslo_vmware.api [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Task: {'id': task-2780587, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.987961] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 941.990371] env[68244]: DEBUG oslo_vmware.api [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780585, 'name': PowerOnVM_Task, 'duration_secs': 0.507482} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.991027] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.991157] env[68244]: INFO nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Took 7.67 seconds to spawn the instance on the hypervisor. 
[ 941.991362] env[68244]: DEBUG nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 941.992290] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674eefce-90b5-4bc1-a699-1ab184b2fcba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 
tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.080511] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.080860] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.080860] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.081086] env[68244]: DEBUG nova.virt.hardware [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.082116] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00010b81-cdf9-49e5-80e7-1c7ad8a8e6c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.090886] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0fb88f-b0e2-4e2f-967a-b5c779297a0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.114596] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.114838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.115049] env[68244]: DEBUG nova.network.neutron [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.126660] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 
tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780586, 'name': ReconfigVM_Task, 'duration_secs': 0.339104} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.126924] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c/4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.127558] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99a53140-7574-4524-88cf-0bf0581f0dba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.135697] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 942.135697] env[68244]: value = "task-2780588" [ 942.135697] env[68244]: _type = "Task" [ 942.135697] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.145525] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780588, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.201042] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.201312] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.201471] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.201653] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.201799] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.201948] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.202318] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.202559] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.202789] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.203023] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.203270] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.208325] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad2c8b11-9762-408d-b675-797d9c6c1539 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.225171] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 942.225171] env[68244]: value = "task-2780589" [ 942.225171] env[68244]: _type = "Task" [ 942.225171] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.236333] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780589, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.267582] env[68244]: DEBUG oslo_vmware.api [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] Task: {'id': task-2780587, 'name': ReconfigVM_Task, 'duration_secs': 0.233445} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.267873] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c627a560-972d-47de-8640-c69e1ac1a2b7 tempest-ServersAdminTestJSON-1989378059 tempest-ServersAdminTestJSON-1989378059-project-admin] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Reconfigured VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 942.449033] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Successfully updated port: 2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 942.512029] env[68244]: INFO nova.compute.manager [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Took 48.51 seconds to build instance. [ 942.652267] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780588, 'name': Rename_Task, 'duration_secs': 0.153013} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.654887] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.655188] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd786e4a-0153-45fd-ba83-465e503f2fed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.661837] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 942.661837] env[68244]: value = "task-2780590" [ 942.661837] env[68244]: _type = "Task" [ 942.661837] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.672541] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780590, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.694969] env[68244]: DEBUG nova.compute.manager [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Received event network-vif-plugged-2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 942.695195] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] Acquiring lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.695398] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.695564] env[68244]: DEBUG oslo_concurrency.lockutils [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.695722] env[68244]: DEBUG nova.compute.manager [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] No waiting events found dispatching network-vif-plugged-2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 942.695879] env[68244]: WARNING nova.compute.manager [req-d1fa65ba-a894-4500-94ba-feab4a0831db req-e4efbf44-1598-4b34-ab9d-93adb76c5a74 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Received unexpected event network-vif-plugged-2f9884c1-2165-4c14-9160-e16fbee901e8 for instance with vm_state building and task_state spawning. [ 942.737207] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780589, 'name': ReconfigVM_Task, 'duration_secs': 0.358124} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.737576] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 942.840341] env[68244]: DEBUG nova.network.neutron [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.955752] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.955917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.956091] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.013279] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b1ba725-482e-48f7-9bfe-28292eaefc07 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.025s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.111577] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3c4709-4943-4f4c-8e89-2995a1c9d77d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.121658] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291d9a05-a8bf-4095-ac90-92ac783e5067 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.171213] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d934f98e-3c78-419f-8c1e-67d29ed9a250 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.180134] env[68244]: DEBUG oslo_vmware.api [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780590, 'name': PowerOnVM_Task, 'duration_secs': 0.494472} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.182826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.183401] env[68244]: INFO nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Took 6.31 seconds to spawn the instance on the hypervisor. 
[ 943.183842] env[68244]: DEBUG nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.184460] env[68244]: INFO nova.compute.manager [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Rebuilding instance [ 943.187302] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8a94b3-126b-46cd-9520-447979cf7402 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.191606] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32341d3f-401f-4db3-9c9a-286eaba7c40b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.208716] env[68244]: DEBUG nova.compute.provider_tree [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.238739] env[68244]: DEBUG nova.compute.manager [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.239686] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e09f86-2cef-4faa-af1f-39108d7cf0be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.249034] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 943.249261] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.249615] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 943.249615] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.250201] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 943.250201] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 943.250201] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 943.250362] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 943.250491] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 943.250564] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 943.250724] env[68244]: DEBUG nova.virt.hardware [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 943.255870] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfiguring VM instance instance-00000039 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 943.258844] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47c0ce2a-724e-4fb1-a171-a10d7cf3097b {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.278280] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 943.278280] env[68244]: value = "task-2780591" [ 943.278280] env[68244]: _type = "Task" [ 943.278280] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.286637] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780591, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.342367] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.535481] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.699070] env[68244]: DEBUG nova.network.neutron [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Updating instance_info_cache with network_info: [{"id": "2f9884c1-2165-4c14-9160-e16fbee901e8", "address": "fa:16:3e:fa:fb:2d", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9884c1-21", "ovs_interfaceid": "2f9884c1-2165-4c14-9160-e16fbee901e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.723587] env[68244]: DEBUG nova.scheduler.client.report [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Inventory has not changed for provider 
b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.735529] env[68244]: INFO nova.compute.manager [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Took 43.33 seconds to build instance. [ 943.791056] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780591, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.202626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.202977] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Instance network_info: |[{"id": "2f9884c1-2165-4c14-9160-e16fbee901e8", "address": "fa:16:3e:fa:fb:2d", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9884c1-21", "ovs_interfaceid": "2f9884c1-2165-4c14-9160-e16fbee901e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 944.203421] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:fb:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f9884c1-2165-4c14-9160-e16fbee901e8', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.214613] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 944.214880] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 944.215167] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c1d6d5d-1f25-4bc3-91ff-66fd4c9b5791 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.230127] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.230663] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 944.233101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.441s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.237547] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f5cea56d-9fa1-484c-bef1-1ea35dd11024 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.838s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.240948] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.240948] env[68244]: value = "task-2780592" [ 944.240948] env[68244]: _type = "Task" [ 944.240948] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.249591] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780592, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.275755] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.276074] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a436e46-9728-4763-8ccb-2cb1652eb111 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.285007] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 944.285007] env[68244]: value = "task-2780593" [ 944.285007] env[68244]: _type = "Task" [ 944.285007] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.291937] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780591, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.296584] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.729618] env[68244]: DEBUG nova.compute.manager [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Received event network-changed-2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 944.729864] env[68244]: DEBUG nova.compute.manager [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Refreshing instance network info cache due to event network-changed-2f9884c1-2165-4c14-9160-e16fbee901e8. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 944.730103] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Acquiring lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.730273] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Acquired lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.730463] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Refreshing network info cache for port 2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.736413] env[68244]: DEBUG nova.compute.utils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 944.737865] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 944.737865] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.757027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.757185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.757292] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.757453] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.757618] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.764727] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780592, 'name': CreateVM_Task, 'duration_secs': 0.373729} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.765183] env[68244]: INFO nova.compute.manager [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Terminating instance [ 944.766475] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.767088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.767251] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.767564] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 944.768956] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b082af22-4e25-4292-af6a-7ceec54326b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.774786] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 944.774786] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227d66e-4a41-2a71-6fb1-9106650a58f9" [ 944.774786] env[68244]: 
_type = "Task" [ 944.774786] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.786694] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227d66e-4a41-2a71-6fb1-9106650a58f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.795427] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780591, 'name': ReconfigVM_Task, 'duration_secs': 1.453491} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.798605] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfigured VM instance instance-00000039 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 944.798977] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780593, 'name': PowerOffVM_Task, 'duration_secs': 0.232291} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.799685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3ff030-93aa-4b74-ab2b-0e3a32c7c29a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.802133] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.802364] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.803090] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be177e2a-7882-4502-9efb-07455768f41d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.808734] env[68244]: DEBUG nova.policy [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f992a7e5cd4a4b44a4062174d9a87933', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9a14e7ecf7e42d086738ccc845abeff', 'project_domain_id': 
'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.829826] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.832199] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd69abc9-13e3-44ad-abd9-23fbc65b244b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.845194] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.845814] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0425536b-8da7-4314-b3d7-601480ca88e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.853072] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 944.853072] env[68244]: value = "task-2780594" [ 944.853072] env[68244]: _type = "Task" [ 944.853072] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.859746] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b3c369-8aef-41bc-be5d-a3ab95ed2b11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.865310] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.880668] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.914049] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.914286] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.914467] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore1] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.914738] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75ba8284-0529-4f9c-8746-efda1e6594cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.921849] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 944.921849] env[68244]: value = "task-2780596" [ 944.921849] env[68244]: _type = "Task" [ 944.921849] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.930816] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780596, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.095541] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Successfully created port: 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.252943] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 945.259345] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance 10957648-8618-4f2c-8b08-5468bca20cfc as it has an incoming, in-progress migration 708441f4-9a09-4c99-bfc8-42d73de28a7f. Migration status is error {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 945.259345] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance 2487689d-7a83-49d7-be78-fbb946ebef8c as it has an incoming, in-progress migration 2764fb14-ee76-4821-a9aa-cb31716b24d6. Migration status is migrating {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 945.259434] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance d74a0d56-8656-429c-a703-fca87e07798f as it has an incoming, in-progress migration 922aca6a-3d71-4286-8137-d883b7d4f8e4. Migration status is migrating {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 945.263835] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=68244) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 945.264941] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating resource usage from migration 2764fb14-ee76-4821-a9aa-cb31716b24d6 [ 945.264941] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating resource usage from migration 922aca6a-3d71-4286-8137-d883b7d4f8e4 [ 945.265237] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating resource usage from migration cf3eda49-2122-4c27-a9c6-168c1bbcf3b9 [ 945.271234] env[68244]: DEBUG nova.compute.manager [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.271495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.272658] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895370d8-6b1d-4463-b6cf-dc90a5d26fde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.293023] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227d66e-4a41-2a71-6fb1-9106650a58f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011404} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.293023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.293023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.293023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.293626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.293939] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.294357] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] 
Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.294790] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e17ed66-3dcc-4da6-bcdb-3242793397f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.298092] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79cb165e-e37e-453a-8a7b-f14800cd9159 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.299484] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.299878] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.300139] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2aacd21f-d664-4267-8331-d3862f43d35b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.300378] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7778c027-d4af-436c-a545-aa513c0b1127 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 10957648-8618-4f2c-8b08-5468bca20cfc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f579141b-1fac-4541-99c3-07644a0a358c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 828865d7-d06a-4683-9149-987e6d9efbd9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b0b79f25-f97d-4d59-ae80-2f8c09201073 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 085b318d-e704-46f9-89a6-679b8aa49f85 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d46f6695-7a96-4e0b-b43a-236bcb4ec519 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ffa17045-fadf-47d7-9c3b-19d0d54de3fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cedcff81-0010-4fa6-95bf-72a4dcac5427 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance c73d39d9-1fb7-4ce7-8d60-9243bd6f519f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 92ce8150-982b-4669-b27a-4afd5c85da86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.302061] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 91232cad-54b3-45af-bb54-af268de182fa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ed5b8ba3-c8f0-468f-85d1-f36179bfef32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b50ed409-296a-4b6d-81d2-f8cfc24de24e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 183ac01e-82b1-470e-9e8f-a8aefb4c64c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance c70fb986-8396-4f11-98c4-1ed977a23bcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration 2764fb14-ee76-4821-a9aa-cb31716b24d6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2487689d-7a83-49d7-be78-fbb946ebef8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ce2c5992-690a-4ab4-8dc1-86d99f8ca647 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration 922aca6a-3d71-4286-8137-d883b7d4f8e4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d74a0d56-8656-429c-a703-fca87e07798f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration cf3eda49-2122-4c27-a9c6-168c1bbcf3b9 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 945.306053] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e8655168-1fe8-4590-90a3-2ad9438d7761 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 945.313029] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 945.313029] env[68244]: value = "task-2780597" [ 945.313029] env[68244]: _type = "Task" [ 945.313029] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.316203] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.316538] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.318232] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd737996-2af8-4c09-aa31-8607037cfc23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.324277] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780597, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.328020] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 945.328020] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52391e9c-33be-c090-ef56-cfdc65fa51c9" [ 945.328020] env[68244]: _type = "Task" [ 945.328020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.338293] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52391e9c-33be-c090-ef56-cfdc65fa51c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.362749] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780594, 'name': ReconfigVM_Task, 'duration_secs': 0.318051} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.363684] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c/2487689d-7a83-49d7-be78-fbb946ebef8c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.364148] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.389071] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.389071] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0127b154-3049-4727-a546-27bc1569344e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.396743] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 945.396743] env[68244]: value = "task-2780598" [ 945.396743] env[68244]: _type = "Task" [ 945.396743] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.407871] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.434032] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155143} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.434032] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.434032] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.434032] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.535348] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Updated VIF entry in instance network info cache for port 2f9884c1-2165-4c14-9160-e16fbee901e8. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.535844] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Updating instance_info_cache with network_info: [{"id": "2f9884c1-2165-4c14-9160-e16fbee901e8", "address": "fa:16:3e:fa:fb:2d", "network": {"id": "9255af86-ca6e-46b2-9deb-9767b7fc1b5f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1148451853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e80cfa81cd442f9af3bf027b9059123", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f9884c1-21", "ovs_interfaceid": "2f9884c1-2165-4c14-9160-e16fbee901e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.809699] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 91d45b22-7963-4615-8455-7d910a9a0fed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 945.820828] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780597, 'name': PowerOffVM_Task, 'duration_secs': 0.293082} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.821083] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.821257] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.821495] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c2f9a09-bf3b-4dbb-9bd1-19345609fb73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.837900] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52391e9c-33be-c090-ef56-cfdc65fa51c9, 'name': SearchDatastore_Task, 'duration_secs': 0.015507} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.838690] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e7ab45c-938b-47a6-8523-8500c90cfee8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.843669] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 945.843669] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525470c5-4d34-1c93-2059-17b0920c8724" [ 945.843669] env[68244]: _type = "Task" [ 945.843669] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.851530] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525470c5-4d34-1c93-2059-17b0920c8724, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.871350] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65035033-e550-4ab0-9932-65ec68947da0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.893055] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabfd9e5-1915-46e0-8fea-0be2cbeaaaec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.915636] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.918899] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.919107] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.919314] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.920088] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53408c9f-128f-42a7-8437-9a9a7eebc173 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.924952] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780598, 'name': PowerOffVM_Task, 'duration_secs': 0.294413} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.925241] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.925426] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.929789] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 945.929789] env[68244]: value = "task-2780600" [ 945.929789] env[68244]: _type = "Task" [ 945.929789] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.941070] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.039192] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Releasing lock "refresh_cache-ce2c5992-690a-4ab4-8dc1-86d99f8ca647" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.039531] env[68244]: DEBUG nova.compute.manager [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Received event network-changed-2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 946.039768] env[68244]: DEBUG nova.compute.manager [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Refreshing instance network info cache due to event network-changed-2c0febdf-3f54-4d82-8373-cfc91569d784. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 946.040060] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Acquiring lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.040267] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Acquired lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.040482] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Refreshing network info cache for port 2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.274556] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 946.301860] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.302082] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.302246] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.302432] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
946.302577] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.302722] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.302928] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.303100] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.303271] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.303445] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.303619] env[68244]: DEBUG nova.virt.hardware [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.304491] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e2fbc2-da12-435b-b5e8-bc28517a1a3d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.311878] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 54b9144a-f84a-4be2-b6de-c61af436ec4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.314785] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d9b672-72d2-4b09-8099-65b8cce231ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.357046] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525470c5-4d34-1c93-2059-17b0920c8724, 'name': SearchDatastore_Task, 'duration_secs': 0.021565} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.357046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.357328] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ce2c5992-690a-4ab4-8dc1-86d99f8ca647/ce2c5992-690a-4ab4-8dc1-86d99f8ca647.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.357644] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d54bd8b-f924-4de8-baee-086d18122e75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.366056] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 946.366056] env[68244]: value = "task-2780601" [ 946.366056] env[68244]: _type = "Task" [ 946.366056] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.376384] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780601, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.433093] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.433093] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.433093] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.433352] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.433352] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.433515] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.433658] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.433815] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.433987] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.434161] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.434337] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.440338] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8459cce-2780-4562-8350-bb1911908035 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.458728] env[68244]: DEBUG nova.network.neutron [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Port 97fdf60d-e090-463d-ae82-229571208a74 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 946.464257] env[68244]: DEBUG oslo_vmware.api [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173395} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.465713] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.465902] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.466109] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.466290] env[68244]: INFO nova.compute.manager [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 946.466549] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.466814] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 946.466814] env[68244]: value = "task-2780602" [ 946.466814] env[68244]: _type = "Task" [ 946.466814] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.467062] env[68244]: DEBUG nova.compute.manager [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.467167] env[68244]: DEBUG nova.network.neutron [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.480325] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.483076] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.483332] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.483509] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.483715] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.483859] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 
0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.484033] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.484280] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.484444] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.484627] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.484797] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.484988] env[68244]: DEBUG nova.virt.hardware [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.486108] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c544176-2589-4bc1-82cc-abb8325dd5a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.496109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfee22b-fcc2-4a69-b7a4-acb77f2fb39c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.510414] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:32:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16438b13-f5f7-472e-af75-2da5ea4e4568', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.518190] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 
tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.518792] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.519070] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-505f955e-3546-45b8-a8d2-8167fe29815f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.545933] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.545933] env[68244]: value = "task-2780603" [ 946.545933] env[68244]: _type = "Task" [ 946.545933] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.557435] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780603, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.822453] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 946.822453] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 26 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 946.822453] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5696MB phys_disk=200GB used_disk=26GB total_vcpus=48 used_vcpus=26 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 946.877609] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780601, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.984181] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780602, 'name': ReconfigVM_Task, 'duration_secs': 0.224303} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.991529] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 947.030374] env[68244]: DEBUG nova.compute.manager [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-vif-plugged-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 947.030908] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] Acquiring lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.030991] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.031494] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.031494] env[68244]: DEBUG nova.compute.manager [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] No waiting events found dispatching network-vif-plugged-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.031645] env[68244]: WARNING nova.compute.manager [req-4bb20a5d-efc7-483c-8ec8-bd1ffecf852a req-e6acabd9-8a5f-461b-bf40-78ca3ef6a43f service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received unexpected event network-vif-plugged-9b1a2db2-4410-496c-9c02-c9af80c39755 for instance with vm_state building and task_state spawning. [ 947.060156] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780603, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.066674] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updated VIF entry in instance network info cache for port 2c0febdf-3f54-4d82-8373-cfc91569d784. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.067070] env[68244]: DEBUG nova.network.neutron [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updating instance_info_cache with network_info: [{"id": "2c0febdf-3f54-4d82-8373-cfc91569d784", "address": "fa:16:3e:f4:01:2d", "network": {"id": "a4bf516c-928d-40ce-8b22-0a96481b47b4", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-544316534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d259eb5849ec490fb550e2763d4df2a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c0febdf-3f", "ovs_interfaceid": "2c0febdf-3f54-4d82-8373-cfc91569d784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.119591] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Successfully updated port: 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.363248] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdc0013-7c1c-4265-a4f2-af1ab3771d6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.373900] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2651e3bd-34d9-4240-a172-fa1f09e494a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.382599] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544572} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.092943] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ce2c5992-690a-4ab4-8dc1-86d99f8ca647/ce2c5992-690a-4ab4-8dc1-86d99f8ca647.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 948.093230] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 948.102618] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:23:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='62be5865-e959-4fad-8733-6e5a5e5fb9d8',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-283340065',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.102835] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.102993] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.103192] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.103337] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.103482] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 948.103679] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.103834] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.103995] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.104169] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.104340] env[68244]: DEBUG nova.virt.hardware [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.109456] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfiguring VM instance instance-0000003a to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 948.109742] env[68244]: DEBUG nova.network.neutron [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.111874] env[68244]: DEBUG oslo_concurrency.lockutils [req-661afa54-ce0b-4520-bc94-357f6c6ae605 req-849e1b70-0443-4b3f-aba9-b375845c4d94 service nova] Releasing lock "refresh_cache-c70fb986-8396-4f11-98c4-1ed977a23bcd" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.112425] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.112557] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
948.112691] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.115066] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6f4a461-c93e-4e84-8ced-4ce5319954d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.117105] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73a7173d-82a5-487c-994a-d5d0306ac3db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.136205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c720dfe-540b-4da2-9d86-3a62e20ae453 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.139358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.139592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.139723] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.148189] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780603, 'name': CreateVM_Task, 'duration_secs': 0.562473} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.152105] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.152448] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 948.152448] env[68244]: value = "task-2780604" [ 948.152448] env[68244]: _type = "Task" [ 948.152448] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.152746] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 948.152746] env[68244]: value = "task-2780605" [ 948.152746] env[68244]: _type = "Task" [ 948.152746] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.153376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.153554] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.153877] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 948.155196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4535a3-1a41-40f1-a586-0712963a417e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.160586] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e0362e0-6904-47fa-90a6-12df5217079b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.171377] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 948.171377] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b1dcd6-dd1b-5ab5-ebd0-f0548f121b85" [ 948.171377] env[68244]: _type = "Task" [ 948.171377] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.185910] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780604, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.186197] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.186896] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.197277] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b1dcd6-dd1b-5ab5-ebd0-f0548f121b85, 'name': SearchDatastore_Task, 'duration_secs': 0.012262} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.197580] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.197829] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.198110] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.198365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.198551] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.199044] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72f835e4-6f15-4e51-a808-de9a971cb0de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.208295] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.208570] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.209396] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aec3415f-de47-4a7d-9a55-68e52cebaf34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.214987] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 948.214987] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523cdb5b-2913-f481-3c66-0969f1b900b4" [ 948.214987] env[68244]: _type = "Task" [ 948.214987] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.222810] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523cdb5b-2913-f481-3c66-0969f1b900b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.643820] env[68244]: INFO nova.compute.manager [-] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Took 2.18 seconds to deallocate network for instance. [ 948.665254] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076653} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.668279] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 948.669026] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e341d3b-ddac-4499-93c3-f272cbfdefb0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.676036] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780605, 'name': ReconfigVM_Task, 'duration_secs': 0.174299} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.684828] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfigured VM instance instance-0000003a to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 948.694347] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] ce2c5992-690a-4ab4-8dc1-86d99f8ca647/ce2c5992-690a-4ab4-8dc1-86d99f8ca647.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.695109] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.698016] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb6b4c2-d25c-4fec-8baa-288fb2cb37bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.701426] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.705044] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74fa8e97-ef2f-4564-b5ff-e066a6e4664e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.719840] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 948.720041] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.487s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.721066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.258s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.721066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.722616] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.090s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.722796] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.724394] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 23.117s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.726539] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 948.726687] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 948.750469] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.751539] env[68244]: INFO nova.scheduler.client.report [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted allocations for instance 085b318d-e704-46f9-89a6-679b8aa49f85 [ 948.753991] env[68244]: INFO nova.scheduler.client.report [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Deleted allocations for instance 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1 [ 948.757575] env[68244]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c49afda-39e7-4bb7-8c46-b82f532309f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.771539] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 948.771539] env[68244]: value = "task-2780606" [ 948.771539] env[68244]: _type = "Task" [ 948.771539] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.780329] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523cdb5b-2913-f481-3c66-0969f1b900b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009214} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.784621] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f23daf93-7ef7-4d8c-bd81-a7a00af0ff3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.788363] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 948.788363] env[68244]: value = "task-2780607" [ 948.788363] env[68244]: _type = "Task" [ 948.788363] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.794994] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 948.794994] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526605f6-ae45-ea64-6387-bdf5e0f50237" [ 948.794994] env[68244]: _type = "Task" [ 948.794994] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.795468] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.803944] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780607, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.811591] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526605f6-ae45-ea64-6387-bdf5e0f50237, 'name': SearchDatastore_Task, 'duration_secs': 0.009155} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.811881] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.812177] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.812446] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74591ddf-39d1-4455-9017-2801117ae718 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.819387] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 948.819387] env[68244]: value = "task-2780608" [ 948.819387] env[68244]: _type = "Task" [ 948.819387] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.830262] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780608, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.885355] env[68244]: DEBUG nova.network.neutron [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.149839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.193684] env[68244]: DEBUG nova.compute.manager [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Received event network-vif-deleted-dd7d02bc-40c3-4660-9ed6-536c09bae7f4 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 949.193862] env[68244]: DEBUG nova.compute.manager [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 949.193930] env[68244]: DEBUG nova.compute.manager [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing instance network info cache due to event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 949.194327] env[68244]: DEBUG oslo_concurrency.lockutils [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.209462] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.209645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.209821] env[68244]: DEBUG nova.network.neutron [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.231275] env[68244]: INFO nova.compute.claims [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.244780] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] There are 40 instances to clean {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 949.245131] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: df4674a2-87de-4507-950a-5941fae93aab] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 949.291085] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74c06206-0990-4f0d-ac22-1f19fb2bd961 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "085b318d-e704-46f9-89a6-679b8aa49f85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.137s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.294263] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd6b06a6-40e2-49bd-86b9-7e844312f406 tempest-SecurityGroupsTestJSON-1005586994 tempest-SecurityGroupsTestJSON-1005586994-project-member] Lock "8f0e60c8-7029-4dd5-b615-aa2b5d115aa1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.264s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.302962] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': 
task-2780606, 'name': ReconfigVM_Task, 'duration_secs': 0.337261} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.303663] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Reconfigured VM instance instance-00000043 to attach disk [datastore2] ce2c5992-690a-4ab4-8dc1-86d99f8ca647/ce2c5992-690a-4ab4-8dc1-86d99f8ca647.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.304339] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad085503-f403-453d-bdb2-7004205d1b0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.310097] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780607, 'name': ReconfigVM_Task, 'duration_secs': 0.371205} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.310776] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.311084] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 949.316314] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 949.316314] env[68244]: value = "task-2780609" [ 949.316314] env[68244]: _type = "Task" [ 949.316314] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.328376] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780608, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496536} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.331426] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.331643] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.331888] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780609, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.332123] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d00f3b1e-8da0-4d73-b4d5-9fc3050afa40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.341168] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 949.341168] env[68244]: value = "task-2780610" [ 949.341168] env[68244]: _type = "Task" [ 949.341168] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.349793] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780610, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.388469] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.388875] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Instance network_info: |[{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.389368] env[68244]: DEBUG oslo_concurrency.lockutils [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.389491] env[68244]: DEBUG nova.network.neutron [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.391094] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:51:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f499bc9-78da-46c1-9274-19edf26d31cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b1a2db2-4410-496c-9c02-c9af80c39755', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.398852] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 
tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Creating folder: Project (c9a14e7ecf7e42d086738ccc845abeff). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.399948] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9ce9ca8-29eb-4781-ab26-dd90a250b0d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.413110] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Created folder: Project (c9a14e7ecf7e42d086738ccc845abeff) in parent group-v558876. [ 949.413374] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Creating folder: Instances. Parent ref: group-v559056. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.413703] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9ec308f-d50a-43bf-9ac9-d212c54ff173 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.423710] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Created folder: Instances in parent group-v559056. [ 949.424039] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.424309] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.424587] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f404864-c9f0-44e7-bb38-a64c31510fe0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.443973] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.443973] env[68244]: value = "task-2780613" [ 949.443973] env[68244]: _type = "Task" [ 949.443973] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.451809] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780613, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.750036] env[68244]: INFO nova.compute.resource_tracker [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating resource usage from migration cf3eda49-2122-4c27-a9c6-168c1bbcf3b9 [ 949.753068] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b9ab66d7-2f94-438a-a2fc-f4b45ad1ee56] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 949.818229] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97324bb9-5744-4b6b-a226-4cc70e00e938 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.830266] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780609, 'name': Rename_Task, 'duration_secs': 0.160189} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.847476] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.848669] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ec1ff87-578c-4665-9f8c-c94988f75de9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.853453] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d203597c-dcb8-41a6-a6d1-47bea2c7c206 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.876409] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081852} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.876711] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 949.883984] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.884699] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 949.884699] env[68244]: value = "task-2780614" [ 949.884699] env[68244]: _type = "Task" [ 949.884699] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.886041] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e9b53d-e01d-439b-ae7f-cabed9ba29e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.897650] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.921017] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.921689] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-422b06c2-e332-410f-b2e6-d7b2c1eff098 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.949182] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 949.949182] env[68244]: value = "task-2780615" [ 949.949182] env[68244]: _type = "Task" [ 949.949182] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.956141] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780613, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.964242] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780615, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.126785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.127036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.127248] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.127429] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.127592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.133353] env[68244]: INFO nova.compute.manager [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Terminating instance [ 950.256708] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 874d6895-0f3d-4a99-b27a-cad627ddeecd] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 950.282461] env[68244]: DEBUG nova.network.neutron [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: 
d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updated VIF entry in instance network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.283615] env[68244]: DEBUG nova.network.neutron [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.285509] env[68244]: DEBUG nova.network.neutron [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.310048] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2684f608-208d-4ba8-bb37-2ccacf26221c {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.319393] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec41c87-ab95-4582-b862-61ffbdb5cb90 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.349833] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40b6dcd-7113-4937-82db-b0153b1fedd5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.357917] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ce9793-18e4-4737-bf4e-3d8efe006180 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.372562] env[68244]: DEBUG nova.compute.provider_tree [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.398493] env[68244]: DEBUG oslo_vmware.api [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780614, 'name': PowerOnVM_Task, 'duration_secs': 0.468631} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.398760] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.398955] env[68244]: INFO nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Took 8.41 seconds to spawn the instance on the hypervisor. 
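
The ReconfigVM_Task and PowerOnVM_Task entries above follow oslo.vmware's poll-until-done pattern: the driver submits a vCenter task, then _poll_task re-reads the TaskInfo at a fixed interval, logging "progress is N%" until the task reports success (the "completed successfully" lines) or an error. Below is a minimal Python sketch of that loop, not the actual implementation: fetch_task_info() and the canned state sequence are hypothetical stand-ins for the RetrievePropertiesEx reads visible in the log, and the real logic lives in oslo_vmware.api (wait_for_task / _poll_task).

import time

POLL_INTERVAL = 0.1  # seconds between polls; the real interval is configurable

# Canned TaskInfo reads standing in for successive PropertyCollector calls
# against one vCenter task. Each dict carries only the fields the log prints.
_FAKE_TASK_STATES = iter([
    {"state": "running", "progress": 10},
    {"state": "running", "progress": 25},
    {"state": "success", "progress": 100},
])


def fetch_task_info(task_id):
    """Hypothetical stand-in for reading TaskInfo via RetrievePropertiesEx."""
    return next(_FAKE_TASK_STATES)


def wait_for_task(task_id, name):
    """Poll until the task leaves the running state, logging like _poll_task."""
    while True:
        info = fetch_task_info(task_id)
        if info["state"] == "running":
            print(f"Task: {{'id': {task_id!r}, 'name': {name!r}}} "
                  f"progress is {info['progress']}%.")
            time.sleep(POLL_INTERVAL)
            continue
        if info["state"] == "success":
            print(f"Task: {{'id': {task_id!r}, 'name': {name!r}}} "
                  "completed successfully.")
            return info
        # Any other state is treated as failure and surfaced to the caller.
        raise RuntimeError(f"Task {task_id} ({name}) did not succeed: {info}")


if __name__ == "__main__":
    wait_for_task("task-2780614", "PowerOnVM_Task")

Running the sketch prints three lines in the same shape as the task-2780614 and task-2780615 entries above; the per-task "Waiting for the task: (returnval){ ... }" blocks in the log mark where the driver enters this wait.
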
[ 950.399147] env[68244]: DEBUG nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.399954] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46e582e-2b3e-49b5-85a1-59fa1f7b1932 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.435084] env[68244]: DEBUG nova.network.neutron [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Port c9ac021e-cd9a-4092-8f49-fd149000b0aa binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 950.457456] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780613, 'name': CreateVM_Task, 'duration_secs': 0.541354} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.458167] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.460024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.460024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.460024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 950.463079] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9df844ea-8f31-4f5e-a1b0-448e4dc9cd0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.465415] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780615, 'name': ReconfigVM_Task, 'duration_secs': 0.291289} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.465831] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfigured VM instance instance-00000025 to attach disk [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.466941] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c92a63c-48a1-4fb9-bc61-5822f534a323 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.470221] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 950.470221] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d75384-afa9-7c2c-306f-a34515a7e0a9" [ 950.470221] env[68244]: _type = "Task" [ 950.470221] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.476417] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 950.476417] env[68244]: value = "task-2780616" [ 950.476417] env[68244]: _type = "Task" [ 950.476417] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.482205] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d75384-afa9-7c2c-306f-a34515a7e0a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.489938] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780616, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.634285] env[68244]: DEBUG nova.compute.manager [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.634538] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.635726] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6506b1a0-cb6d-43b9-b26d-043d0e87f19e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.643289] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.643557] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c1fb441-5789-40e0-8615-69b01a571c2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.648855] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 950.648855] env[68244]: value = "task-2780617" [ 950.648855] env[68244]: _type = "Task" [ 950.648855] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.656721] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.760088] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 184f7694-9cab-4184-a1c0-926763a81baf] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 950.786479] env[68244]: DEBUG oslo_concurrency.lockutils [req-8026c337-ef04-43de-a2d9-f249fe06a860 req-2be9e423-5694-4de5-b3f1-472a18642485 service nova] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.790561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.875493] env[68244]: DEBUG nova.scheduler.client.report [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.916112] env[68244]: INFO nova.compute.manager [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Took 45.05 seconds to build instance. [ 950.981949] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d75384-afa9-7c2c-306f-a34515a7e0a9, 'name': SearchDatastore_Task, 'duration_secs': 0.029632} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.982241] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.982474] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.982713] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.982858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.983444] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.986127] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41941329-75a4-4505-ac08-909e90c011f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.994281] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780616, 'name': Rename_Task, 'duration_secs': 0.160596} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.994551] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.994784] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db22a498-ec53-4f8e-baa9-0aedc303bd49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.997344] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.997524] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.998806] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-796d6e48-257b-4e1a-be70-0d5bcbf1c4cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.003034] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 951.003034] env[68244]: value = "task-2780618" [ 951.003034] env[68244]: _type = "Task" [ 951.003034] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.004265] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 951.004265] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5208a771-3235-9962-6d11-95c56115f2c2" [ 951.004265] env[68244]: _type = "Task" [ 951.004265] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.016613] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.020199] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5208a771-3235-9962-6d11-95c56115f2c2, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.020969] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-818ae11d-e3a6-488c-9e67-02a69d6d23e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.026255] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 951.026255] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52deafa5-34ee-6ced-80b6-85fef6f08aec" [ 951.026255] env[68244]: _type = "Task" [ 951.026255] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.034308] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52deafa5-34ee-6ced-80b6-85fef6f08aec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.159384] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780617, 'name': PowerOffVM_Task, 'duration_secs': 0.457824} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.159384] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.159522] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.159788] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7b63be8-af82-4a5b-ba0a-dd8b36ed76d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.228478] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.228842] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.229187] 
env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleting the datastore file [datastore2] b0b79f25-f97d-4d59-ae80-2f8c09201073 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.229589] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9399ca8b-12da-4349-9124-7550816e5fa8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.237798] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for the task: (returnval){ [ 951.237798] env[68244]: value = "task-2780620" [ 951.237798] env[68244]: _type = "Task" [ 951.237798] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.249250] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.263609] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8c00240d-5124-4ada-bd4d-4acd39a345c8] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 951.322053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be80eb55-9f3e-4aa2-9ff2-5fe23831ab44 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.343236] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfab81f-d5bb-41d8-8846-06ea10921dcb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.350293] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 951.382091] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.658s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.382316] env[68244]: INFO nova.compute.manager [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Migrating [ 951.390373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 
tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.280s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.392425] env[68244]: INFO nova.compute.claims [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.417686] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99872c1e-cc1f-417e-a13f-1e91f8927738 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.564s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.459536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "d74a0d56-8656-429c-a703-fca87e07798f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.459765] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.459940] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.515477] env[68244]: DEBUG oslo_vmware.api [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780618, 'name': PowerOnVM_Task, 'duration_secs': 0.499924} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.515708] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 951.515912] env[68244]: DEBUG nova.compute.manager [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 951.516763] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9268dea-489f-47a7-a3a0-b5538d71840f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.538743] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52deafa5-34ee-6ced-80b6-85fef6f08aec, 'name': SearchDatastore_Task, 'duration_secs': 0.021392} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.539744] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.540023] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.540307] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-377eb96b-c030-4db3-a126-565b0621b905 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.547540] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 951.547540] env[68244]: value = "task-2780621" [ 951.547540] env[68244]: _type = "Task" [ 951.547540] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.556775] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.754457] env[68244]: DEBUG oslo_vmware.api [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Task: {'id': task-2780620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300484} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.755056] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.755478] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.757020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.757020] env[68244]: INFO nova.compute.manager [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Took 1.12 seconds to destroy the instance on the hypervisor. [ 951.757020] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.757289] env[68244]: DEBUG nova.compute.manager [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.757504] env[68244]: DEBUG nova.network.neutron [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.768812] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 774ce6f8-6273-4f2b-b398-ee8c44d79520] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 951.856240] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.856900] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-016ec3e1-adaf-4dc8-8ee6-e17f60907919 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.865103] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 951.865103] env[68244]: value = "task-2780622" [ 951.865103] env[68244]: _type = "Task" [ 951.865103] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.873363] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.908364] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.908577] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.912213] env[68244]: DEBUG nova.network.neutron [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.033932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.058887] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780621, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.193287] env[68244]: DEBUG nova.compute.manager [req-54d43bc3-d7d8-45cc-95c1-b100a07085f4 req-a7b47abe-66b9-4843-aaf7-405351e42fd5 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Received event network-vif-deleted-c231c79b-11e8-4987-8977-587e745b5cbe {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 952.193672] env[68244]: INFO nova.compute.manager [req-54d43bc3-d7d8-45cc-95c1-b100a07085f4 req-a7b47abe-66b9-4843-aaf7-405351e42fd5 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Neutron deleted interface c231c79b-11e8-4987-8977-587e745b5cbe; detaching it from the instance and deleting it from the info cache [ 952.193790] env[68244]: DEBUG nova.network.neutron [req-54d43bc3-d7d8-45cc-95c1-b100a07085f4 req-a7b47abe-66b9-4843-aaf7-405351e42fd5 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.270685] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 2d9dbf75-992d-4932-bd5d-84462494ebe8] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 952.377072] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780622, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.446045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.446880] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.446880] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.446880] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.447145] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.450402] env[68244]: INFO nova.compute.manager [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Terminating instance [ 952.510158] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.511071] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.511071] env[68244]: DEBUG nova.network.neutron [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.564843] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.736961} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.568309] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.568669] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.571452] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb40a2de-0995-413c-93c8-02ea04184614 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.579079] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 952.579079] env[68244]: value = "task-2780623" [ 952.579079] env[68244]: _type = "Task" [ 952.579079] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.590355] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780623, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.636715] env[68244]: DEBUG nova.network.neutron [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.699724] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34e6c6bf-bdd4-40d4-a9a9-fdabbddda5bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.713805] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd48e95-19cf-46cc-84ed-ceae89d49156 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.729634] env[68244]: DEBUG nova.network.neutron [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.760662] env[68244]: DEBUG nova.compute.manager [req-54d43bc3-d7d8-45cc-95c1-b100a07085f4 req-a7b47abe-66b9-4843-aaf7-405351e42fd5 service nova] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Detach interface failed, port_id=c231c79b-11e8-4987-8977-587e745b5cbe, reason: Instance b0b79f25-f97d-4d59-ae80-2f8c09201073 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 952.775453] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 6915d271-8346-41b5-a75b-2188fd3b57d1] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 952.879931] env[68244]: DEBUG oslo_vmware.api [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780622, 'name': PowerOnVM_Task, 'duration_secs': 0.565438} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.880237] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.880419] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6ad119-bcdb-40b2-b30d-9741fde8c277 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance '2487689d-7a83-49d7-be78-fbb946ebef8c' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 952.951019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93aa4a7-924c-4fc7-861f-8f4aaf89cf11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.954309] env[68244]: DEBUG nova.compute.manager [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 952.954685] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.955633] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05583b7d-f0b2-4141-b51b-6095b3784f08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.965019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5079bb3-183a-493a-89cc-d13eaf5ae043 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.968309] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.969115] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68396c7b-313c-4ae3-8332-3b860f923c7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.004611] env[68244]: INFO nova.compute.manager [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Rebuilding instance [ 953.010050] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975f860d-52d6-4ae1-b3a9-c81e2c56b621 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.011691] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 953.011691] env[68244]: value = "task-2780624" [ 953.011691] env[68244]: _type = "Task" [ 953.011691] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.020559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7dea6b-5bbf-47e8-968f-aa29bd9c7416 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.029112] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780624, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.039139] env[68244]: DEBUG nova.compute.provider_tree [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.060488] env[68244]: DEBUG nova.compute.manager [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.061353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130d3929-021e-491f-a87e-d12301436f92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.088352] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.268542} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.088631] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.089445] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9b32f9-9d91-4dce-bcc7-104188e41945 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.112308] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.116326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b5ca346-c694-4e6a-b7f9-ff1315ab18b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.137471] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 953.137471] env[68244]: value = "task-2780625" [ 953.137471] env[68244]: _type = "Task" [ 953.137471] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.140847] env[68244]: INFO nova.compute.manager [-] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Took 1.38 seconds to deallocate network for instance. [ 953.151036] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780625, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.233656] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.278428] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 340aa1e7-dc0a-4cba-8979-0c591830e9db] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 953.438860] env[68244]: DEBUG nova.network.neutron [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.522075] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780624, 'name': PowerOffVM_Task, 'duration_secs': 0.437136} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.522438] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.522514] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.522741] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4070cc11-d68c-4bd8-8351-4da0bc19604e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.542628] env[68244]: DEBUG nova.scheduler.client.report [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.591899] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.592252] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.592452] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleting the datastore file [datastore2] ce2c5992-690a-4ab4-8dc1-86d99f8ca647 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.592713] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d32749f3-6219-4633-ae40-d91cc15d394e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.599080] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for the task: (returnval){ [ 953.599080] 
env[68244]: value = "task-2780627" [ 953.599080] env[68244]: _type = "Task" [ 953.599080] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.606938] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780627, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.647860] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.648158] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.781788] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 100ec1f9-6776-4832-a4c2-e9a4def0d350] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 953.946282] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.047449] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.048047] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.051538] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.096s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.051538] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.054569] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.823s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.056021] env[68244]: INFO nova.compute.claims [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.076350] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.076600] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fd73df2-8242-406a-9275-499ac75ea275 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.085534] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 954.085534] env[68244]: value = "task-2780628" [ 954.085534] env[68244]: _type = "Task" [ 954.085534] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.094562] env[68244]: INFO nova.scheduler.client.report [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleted allocations for instance 91232cad-54b3-45af-bb54-af268de182fa [ 954.103222] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780628, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.114421] env[68244]: DEBUG oslo_vmware.api [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Task: {'id': task-2780627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169137} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.115918] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.115918] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.115918] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.115918] env[68244]: INFO nova.compute.manager [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Took 1.16 seconds to destroy the instance on the hypervisor. [ 954.115918] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.116277] env[68244]: DEBUG nova.compute.manager [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.116365] env[68244]: DEBUG nova.network.neutron [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.150209] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780625, 'name': ReconfigVM_Task, 'duration_secs': 0.928495} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.150411] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Reconfigured VM instance instance-00000044 to attach disk [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.151413] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7aea90bb-5683-4134-a4a7-5027fca77a4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.161384] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 954.161384] env[68244]: value = "task-2780629" [ 954.161384] env[68244]: _type = "Task" [ 954.161384] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.174888] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780629, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.285629] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f5724973-2349-481c-b2ba-d1287f09c1db] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 954.472673] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db13610-c38b-4b07-8c80-f4f038886fc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.491416] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f312f705-0e28-4162-a4a2-2d0bb172a5d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.498147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 954.561047] env[68244]: DEBUG nova.compute.utils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 954.567114] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Allocating IP information in the 
background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 954.567114] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.595490] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780628, 'name': PowerOffVM_Task, 'duration_secs': 0.213548} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.595766] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.595995] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.597108] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0669413a-0f60-4740-a553-4bc34d3a7e88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.604864] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.606079] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f63450f-0d58-4a60-a41c-dfa4a545d7c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.612386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a4b22b2-a264-49a6-9496-f7235b902a13 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "91232cad-54b3-45af-bb54-af268de182fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.072s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.623013] env[68244]: DEBUG nova.policy [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6af77f00c84d4e99bea878bc30dcc361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '821b99c053aa45b4b6b8fb09eb63aa73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.660337] env[68244]: DEBUG nova.compute.manager [req-f87742aa-7761-4907-bdfc-20d9bd5d4d4a req-8f4a38f3-d60c-4260-b43c-219cdae37daf service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Received event network-vif-deleted-2f9884c1-2165-4c14-9160-e16fbee901e8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 954.660552] env[68244]: INFO nova.compute.manager [req-f87742aa-7761-4907-bdfc-20d9bd5d4d4a req-8f4a38f3-d60c-4260-b43c-219cdae37daf service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Neutron deleted interface 2f9884c1-2165-4c14-9160-e16fbee901e8; detaching it from the instance and deleting it from the info cache [ 954.660749] env[68244]: DEBUG nova.network.neutron [req-f87742aa-7761-4907-bdfc-20d9bd5d4d4a req-8f4a38f3-d60c-4260-b43c-219cdae37daf service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.675013] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780629, 'name': Rename_Task, 'duration_secs': 0.165606} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.676361] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.676640] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.676823] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.676996] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.677754] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24145a12-b182-4f3d-8f15-5381ee589f6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.679431] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c909298-8238-4b5a-a1b5-ffa857afa45a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.685585] env[68244]: DEBUG oslo_vmware.api 
[None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 954.685585] env[68244]: value = "task-2780631" [ 954.685585] env[68244]: _type = "Task" [ 954.685585] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.689760] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 954.689760] env[68244]: value = "task-2780632" [ 954.689760] env[68244]: _type = "Task" [ 954.689760] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.695930] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.701038] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.751414] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3c8e4b-a2af-4b7f-a4d0-6fd049609cf2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.773576] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 954.790936] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f113bb6c-f05a-4253-98af-ca827fcbb723] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 955.005349] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.005719] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3406c1ba-2f29-4cac-ae09-85290c995370 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.011228] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Successfully created port: d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 955.019348] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 955.019348] env[68244]: value = "task-2780633" [ 955.019348] env[68244]: _type = "Task" [ 955.019348] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.031675] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.055843] env[68244]: DEBUG nova.network.neutron [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.066194] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.165508] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-024bf68e-30f0-464b-ba1d-af31e84fd7dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.176199] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94ed030-7088-4c8d-b5b5-6299b5a7952d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.207927] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780631, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.213835] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167027} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.228170] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.228404] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.228612] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.231560] env[68244]: DEBUG nova.compute.manager [req-f87742aa-7761-4907-bdfc-20d9bd5d4d4a req-8f4a38f3-d60c-4260-b43c-219cdae37daf service nova] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Detach interface failed, port_id=2f9884c1-2165-4c14-9160-e16fbee901e8, reason: Instance ce2c5992-690a-4ab4-8dc1-86d99f8ca647 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 955.281390] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.281719] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b5cc2e4-f832-4e6d-a3c2-7be2094ef0be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.289435] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 955.289435] env[68244]: value = "task-2780634" [ 955.289435] env[68244]: _type = "Task" [ 955.289435] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.297604] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: c9f5fbeb-28b6-4b41-9156-5b90bc19977c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 955.305859] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 955.306138] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 955.529302] env[68244]: DEBUG oslo_vmware.api [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780633, 'name': PowerOnVM_Task, 'duration_secs': 0.433951} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.529534] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.529720] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ee487a96-4954-4fa2-a0f9-5aa3085f2d05 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance 'd74a0d56-8656-429c-a703-fca87e07798f' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 955.565249] env[68244]: INFO nova.compute.manager [-] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Took 1.45 seconds to deallocate network for instance. [ 955.680490] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5db07f-394f-4451-a5c7-c1ca2646a08e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.688352] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302405d3-9531-4324-9c6b-9e7a22e9fe87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.700485] env[68244]: DEBUG oslo_vmware.api [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780631, 'name': PowerOnVM_Task, 'duration_secs': 0.71219} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.724709] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.724709] env[68244]: INFO nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Took 9.45 seconds to spawn the instance on the hypervisor. [ 955.724824] env[68244]: DEBUG nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.727384] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3490169b-6ad2-41f1-8f07-4e42ffd6a3b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.730850] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd49732-6203-4c0c-a0e3-5f818c3e0d5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.744119] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125ed1e7-7916-4ee2-a5c3-492ddfb303bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.762614] env[68244]: DEBUG nova.compute.provider_tree [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.801072] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d1fb6fff-b1b7-4c1b-8995-41628cadf7d5] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 955.812158] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 
955.812423] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.812580] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.812762] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.812909] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.813110] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.813361] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.813563] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.813754] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.813897] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.814085] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.820419] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd64325d-243d-4368-9b50-0518dbe663ff {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.841667] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 955.841667] env[68244]: value = "task-2780636" [ 955.841667] env[68244]: _type = "Task" [ 955.841667] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.851876] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780636, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.072700] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.084888] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.115191] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='644fb048ddb174e8f520b0d5308f2845',container_format='bare',created_at=2025-03-06T03:25:17Z,direct_url=,disk_format='vmdk',id=65d86da3-59fb-4ec7-873b-2525143225e1,min_disk=1,min_ram=0,name='tempest-test-snap-1775444646',owner='821b99c053aa45b4b6b8fb09eb63aa73',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-06T03:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.115664] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.115737] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.115884] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.116565] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.116565] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.116565] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.116759] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.116797] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.116986] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.117236] env[68244]: DEBUG nova.virt.hardware [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.118143] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e96a3-25bd-4c51-ab34-3bdb4917f14d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.126549] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b07bf9f-1ee0-49c5-b1fa-bda4ed94807e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.140631] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.140893] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.141081] env[68244]: DEBUG nova.compute.manager [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Going to confirm migration 2 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 956.263468] env[68244]: INFO nova.compute.manager [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Took 49.51 seconds to build instance. [ 956.265318] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.265614] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.265827] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.266092] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.266272] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.266481] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.266748] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.266951] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.267225] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.267368] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.267565] env[68244]: DEBUG nova.virt.hardware [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.268607] env[68244]: DEBUG nova.scheduler.client.report [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.272671] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80e0710-c7f1-4a40-85a7-00c003695371 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.281692] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805a1160-d2ca-45d2-a9f4-66ff6dd69344 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.297692] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:32:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16438b13-f5f7-472e-af75-2da5ea4e4568', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.305715] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.306365] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 4c394e1d-8fef-4b7a-ac9f-550f263c1d7f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 956.308012] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.308496] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dfb973f-7701-4176-8730-87ea3af21aec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.332728] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.332728] env[68244]: value = "task-2780637" [ 956.332728] env[68244]: _type = "Task" [ 956.332728] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.341059] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780637, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.354213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.354496] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.355633] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780636, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.612046] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Successfully updated port: d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.696158] env[68244]: DEBUG nova.compute.manager [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Received event network-vif-plugged-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.696457] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Acquiring lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.696668] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.696841] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.697081] env[68244]: DEBUG nova.compute.manager [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] No waiting events found dispatching network-vif-plugged-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.697261] env[68244]: WARNING nova.compute.manager [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Received unexpected event network-vif-plugged-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 for instance with vm_state building and task_state spawning. [ 956.697426] env[68244]: DEBUG nova.compute.manager [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Received event network-changed-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.697580] env[68244]: DEBUG nova.compute.manager [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Refreshing instance network info cache due to event network-changed-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 956.697760] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Acquiring lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.697895] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Acquired lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.698061] env[68244]: DEBUG nova.network.neutron [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Refreshing network info cache for port d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.730605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.730834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.731029] env[68244]: DEBUG nova.network.neutron [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.731227] env[68244]: DEBUG nova.objects.instance [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'info_cache' on Instance uuid 2487689d-7a83-49d7-be78-fbb946ebef8c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.776362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-eb34ffa1-dc91-4dde-b6c7-31eeb9f941d2 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.032s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.777049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.777525] env[68244]: 
DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 956.779997] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.898s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.781761] env[68244]: INFO nova.compute.claims [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.809719] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 086dda59-4bd2-4ca2-a758-c120f1271f42] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 956.852542] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780637, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.857499] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.860142] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780636, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.116600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.231889] env[68244]: DEBUG nova.network.neutron [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.268288] env[68244]: INFO nova.compute.manager [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Rescuing [ 957.268635] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.268790] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.268984] env[68244]: DEBUG nova.network.neutron [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.289038] env[68244]: DEBUG nova.compute.utils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.292150] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.292322] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.313442] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 6abb889a-2e96-4aba-8e36-c4c8997dd4e2] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 957.332263] env[68244]: DEBUG nova.network.neutron [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.344105] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780637, 'name': CreateVM_Task, 'duration_secs': 0.718206} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.346198] env[68244]: DEBUG nova.policy [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1272dd4182b44b208e76f80a06758a25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acbfe945ee0c4cbcbaa57b7a8d10a40a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.350275] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.351186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.351359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.351671] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 957.352530] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3d13a8-09f8-4ed3-bbef-a330df77b5d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.357555] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780636, 'name': ReconfigVM_Task, 'duration_secs': 1.306555} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.358139] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 957.365562] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 957.365562] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5240518f-df21-7bf0-d129-e10218326cba" [ 957.365562] env[68244]: _type = "Task" [ 957.365562] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.377524] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5240518f-df21-7bf0-d129-e10218326cba, 'name': SearchDatastore_Task, 'duration_secs': 0.009884} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.377959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.378350] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.378605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.378747] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.378916] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.379173] env[68244]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b23ecc9-2a02-47a0-a32a-94fae2016c77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.384929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.386324] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.386497] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.387256] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea7a2ef6-1e46-4856-86ae-d1bf7e2fab56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.392685] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 957.392685] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256b88c-c9a9-ba31-127a-089676632dd5" [ 957.392685] env[68244]: _type = "Task" [ 957.392685] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.400929] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256b88c-c9a9-ba31-127a-089676632dd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.611190] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Successfully created port: 2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.795774] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 957.816897] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 09ab8712-0f7a-4122-9d61-19da3e65d22b] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 957.841237] env[68244]: DEBUG oslo_concurrency.lockutils [req-698e018a-f349-416a-8187-a16caec55f2b req-27c06133-5cec-4952-9b55-380e15378532 service nova] Releasing lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.846998] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.847192] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.870710] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.870710] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.870710] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.870963] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.870963] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.871550] env[68244]: 
DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.871550] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.871550] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.871674] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.872626] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.873080] env[68244]: DEBUG nova.virt.hardware [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.878952] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfiguring VM instance instance-00000009 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 957.882340] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ee54fd6-1d9c-4127-84a9-5272d4477c28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.910361] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5256b88c-c9a9-ba31-127a-089676632dd5, 'name': SearchDatastore_Task, 'duration_secs': 0.008838} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.914092] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 957.914092] env[68244]: value = "task-2780638" [ 957.914092] env[68244]: _type = "Task" [ 957.914092] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.914092] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb0329d6-54f0-4396-95e5-758389c43113 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.932430] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.932772] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 957.932772] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c1fc85-2a7e-0e9f-145d-4f7af88a3394" [ 957.932772] env[68244]: _type = "Task" [ 957.932772] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.946760] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c1fc85-2a7e-0e9f-145d-4f7af88a3394, 'name': SearchDatastore_Task, 'duration_secs': 0.011717} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.947044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.947317] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.947583] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2359f9e3-d260-487a-a1b0-2f19ebb1dd3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.954419] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 957.954419] env[68244]: value = "task-2780639" [ 957.954419] env[68244]: _type = "Task" [ 957.954419] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.962529] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780639, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.973680] env[68244]: DEBUG nova.network.neutron [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Port c9ac021e-cd9a-4092-8f49-fd149000b0aa binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 957.973948] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.974123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.974299] env[68244]: DEBUG nova.network.neutron [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.164709] env[68244]: DEBUG nova.network.neutron [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [{"id": "97fdf60d-e090-463d-ae82-229571208a74", "address": "fa:16:3e:0e:67:3c", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap97fdf60d-e0", "ovs_interfaceid": "97fdf60d-e090-463d-ae82-229571208a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.169935] env[68244]: DEBUG nova.network.neutron [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": 
"9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.324058] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b0090ea8-98fe-42a0-97cc-40d7578851a9] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 958.365175] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e788ca-abe9-462c-ba11-01f2ead6d82a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.376316] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3e1d38-87ea-4895-907b-74430b7fdb29 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.409615] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.412279] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c9ad92-69a8-4d15-98f7-0cfd0bbeaa96 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.422945] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8416ae57-016d-4d7c-8c20-77c5379c0bcd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.432298] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780638, 'name': ReconfigVM_Task, 'duration_secs': 0.207498} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.433034] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 958.434071] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa66d2d8-ed56-4254-97d7-6ceb3663c958 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.447591] env[68244]: DEBUG nova.compute.provider_tree [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.471230] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.475447] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a62b0200-9afa-471b-9610-b3050d3d58a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.500451] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780639, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532634} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.502013] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 958.502275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 958.502562] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 958.502562] env[68244]: value = "task-2780640" [ 958.502562] env[68244]: _type = "Task" [ 958.502562] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.502757] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94e49bbe-3a83-450a-8577-83b2c8a4e00e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.513272] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780640, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.514536] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 958.514536] env[68244]: value = "task-2780641" [ 958.514536] env[68244]: _type = "Task" [ 958.514536] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.667308] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-2487689d-7a83-49d7-be78-fbb946ebef8c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.667589] env[68244]: DEBUG nova.objects.instance [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'migration_context' on Instance uuid 2487689d-7a83-49d7-be78-fbb946ebef8c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.672980] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.812475] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 958.830232] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: bbc08614-926e-4209-abec-4808f223943a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 958.836110] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.836110] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.836110] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Image limits 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.836110] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.836110] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.836361] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.836361] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.836589] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.836784] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.836948] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.837195] env[68244]: DEBUG nova.virt.hardware [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.838096] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fae992f-02e2-4d6f-8655-f5a274e5f00d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.846931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b19c97d-1177-4a66-a68c-b1650eadae4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.865612] env[68244]: DEBUG nova.network.neutron [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 
91d45b22-7963-4615-8455-7d910a9a0fed] Updating instance_info_cache with network_info: [{"id": "d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3", "address": "fa:16:3e:35:82:ac", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5504b3b-2d", "ovs_interfaceid": "d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.951269] env[68244]: DEBUG nova.scheduler.client.report [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.963586] env[68244]: DEBUG nova.network.neutron [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.015625] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.023786] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780641, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079065} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.024438] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.025413] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f44f15a-c90d-4e9f-aea9-6c4fba4cd4f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.049439] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.050147] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a05b69eb-c24a-492c-bf25-e631bd4e35f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.071269] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 959.071269] env[68244]: value = "task-2780642" [ 959.071269] env[68244]: _type = "Task" [ 959.071269] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.079886] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780642, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.170344] env[68244]: DEBUG nova.objects.base [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Object Instance<2487689d-7a83-49d7-be78-fbb946ebef8c> lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 959.171531] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8564b48-c5b7-43da-bca4-7a6a83ce8045 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.191903] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-957b91c6-b5af-4f9e-b7a8-9b98bd31b383 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.197825] env[68244]: DEBUG oslo_vmware.api [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 959.197825] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527da3c3-9e28-721a-19e0-24151584ccee" [ 959.197825] env[68244]: _type = "Task" [ 959.197825] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.207910] env[68244]: DEBUG oslo_vmware.api [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527da3c3-9e28-721a-19e0-24151584ccee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.256349] env[68244]: DEBUG nova.compute.manager [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Received event network-vif-plugged-2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 959.256349] env[68244]: DEBUG oslo_concurrency.lockutils [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] Acquiring lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.256349] env[68244]: DEBUG oslo_concurrency.lockutils [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.256349] env[68244]: DEBUG oslo_concurrency.lockutils [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.256349] env[68244]: DEBUG nova.compute.manager [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] No waiting events found dispatching network-vif-plugged-2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 959.256349] env[68244]: WARNING nova.compute.manager [req-f066ae73-44fd-4e37-bb22-e4bedb4e95ca req-347db4a2-a9f9-42f4-9a68-7def67554e41 service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Received unexpected event network-vif-plugged-2258be90-64bd-4241-81f3-2cb028b7a8cc for instance with vm_state building and task_state spawning. 
[ 959.300151] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Successfully updated port: 2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.333517] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f270caad-1b02-4d5b-a435-37b77c05c4e7] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 959.368370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "refresh_cache-91d45b22-7963-4615-8455-7d910a9a0fed" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.368799] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance network_info: |[{"id": "d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3", "address": "fa:16:3e:35:82:ac", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5504b3b-2d", "ovs_interfaceid": "d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.369271] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:82:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.377077] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.377541] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.377783] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-470188e5-b0e1-48c0-adf0-d93a9962e660 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.398159] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.398159] env[68244]: value = "task-2780643" [ 959.398159] env[68244]: _type = "Task" [ 959.398159] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.407682] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780643, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.456232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.676s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.456782] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.459633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.794s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.459851] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.461853] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.312s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.462076] env[68244]: DEBUG nova.objects.instance [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.466035] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.482046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.482318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.482559] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.483037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 
tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.483037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.486091] env[68244]: INFO nova.scheduler.client.report [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Deleted allocations for instance b50ed409-296a-4b6d-81d2-f8cfc24de24e [ 959.487856] env[68244]: INFO nova.compute.manager [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Terminating instance [ 959.514648] env[68244]: DEBUG oslo_vmware.api [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780640, 'name': ReconfigVM_Task, 'duration_secs': 0.746422} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.514925] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Reconfigured VM instance instance-00000009 to attach disk [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761/e8655168-1fe8-4590-90a3-2ad9438d7761.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.515221] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 959.581088] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780642, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.708151] env[68244]: DEBUG oslo_vmware.api [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527da3c3-9e28-721a-19e0-24151584ccee, 'name': SearchDatastore_Task, 'duration_secs': 0.008394} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.710183] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.806378] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.806710] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquired lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.806710] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.836214] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: fd4d5494-042b-457e-a826-dee4d87c0032] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 959.908347] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780643, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.965499] env[68244]: DEBUG nova.compute.utils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 959.970325] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 959.970491] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.972961] env[68244]: DEBUG nova.compute.manager [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68244) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 959.973201] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.993788] env[68244]: DEBUG nova.compute.manager [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.994041] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.996892] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c7aef4-f8ae-4d4a-8154-641ec78f823b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.002431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-32a6b9c6-93f3-4b7d-956f-acd88f86844e tempest-ServerPasswordTestJSON-1383228980 tempest-ServerPasswordTestJSON-1383228980-project-member] Lock "b50ed409-296a-4b6d-81d2-f8cfc24de24e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.390s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.008453] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.008828] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d8548c8-954a-4472-ae5a-fa2b35b9bb9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.011690] env[68244]: DEBUG nova.policy [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 
tempest-ServersNegativeTestJSON-1623060957-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03702d95a6b04249beb0e4178ef5c747', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd48f74a8554407593bb2c69b3191d85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.019948] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 960.019948] env[68244]: value = "task-2780644" [ 960.019948] env[68244]: _type = "Task" [ 960.019948] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.022170] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1b0329-2b51-4d79-bba3-e3c6ddb69fa3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.053861] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1146be57-ef7a-4000-81b6-d0655f0e7906 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.056520] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.073343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 960.088145] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780642, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.214808] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.214808] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1185f5f-9727-4dee-bae4-bdc5179fb70e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.220520] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 960.220520] env[68244]: value = "task-2780645" [ 960.220520] env[68244]: _type = "Task" [ 960.220520] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.235213] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.339293] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: cb607c5e-797d-4e52-9ba4-66113718dacc] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 960.367283] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.412873] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780643, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.417955] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Successfully created port: 58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.470391] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 960.516080] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4259a6-964f-43e9-ab3b-62a79bf92055 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.529874] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ce3a0c-c9bd-4fcc-8e45-c600e056c5a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.570217] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780644, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.571548] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce7e572-4a88-411c-b582-1c75fc12b128 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.582453] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780642, 'name': ReconfigVM_Task, 'duration_secs': 1.101646} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.584569] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Reconfigured VM instance instance-00000025 to attach disk [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b/cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.588472] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d27ab16-c169-47ba-9056-25b369f56336 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.591124] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239779cb-4220-4028-8d81-7e6b8c7d7f69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.606113] env[68244]: DEBUG nova.compute.provider_tree [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.609586] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 960.609586] env[68244]: value = "task-2780646" [ 960.609586] env[68244]: _type = "Task" [ 960.609586] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.618933] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780646, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.635561] env[68244]: DEBUG nova.network.neutron [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Updating instance_info_cache with network_info: [{"id": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "address": "fa:16:3e:19:65:07", "network": {"id": "773e2ef7-0a29-4903-a18b-8f2d3c804c38", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-677243533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acbfe945ee0c4cbcbaa57b7a8d10a40a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2258be90-64", "ovs_interfaceid": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.665104] env[68244]: DEBUG nova.network.neutron [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Port 3a4a896b-0463-43a3-8487-d50328142090 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 960.730588] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780645, 'name': PowerOffVM_Task, 'duration_secs': 0.217036} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.730588] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.731386] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbf6837-5454-4ca8-9c7b-8c167921c4fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.749933] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292a2cb4-78ea-408e-b5f2-7751bfa75abd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.787080] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.787382] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c22ca198-d371-4522-baeb-45206471ad0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.794719] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 960.794719] env[68244]: value = "task-2780647" [ 960.794719] env[68244]: _type = "Task" [ 960.794719] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.804175] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 960.804395] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.804655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.804787] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.804962] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.805229] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08534bc3-bfc0-4f39-bf5d-da9a78887fbc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.812963] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.813165] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.814253] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7715637d-7ca2-46b1-b366-73a241994ad6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.820143] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 960.820143] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529feba0-8743-aac1-d145-c9d6a838881f" [ 960.820143] env[68244]: _type = "Task" [ 960.820143] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.828433] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529feba0-8743-aac1-d145-c9d6a838881f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.845761] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: fe873e92-1481-4c5f-b4ca-90e052bd10c0] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 960.911029] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780643, 'name': CreateVM_Task, 'duration_secs': 1.319569} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.911029] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.911029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.911029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.911029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.911029] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fde3419-6a37-46dc-b855-e15bcffcf3d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.916049] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 960.916049] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d94a8d-4ed1-7eed-1715-275d41563482" [ 960.916049] env[68244]: _type = "Task" [ 960.916049] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.925821] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d94a8d-4ed1-7eed-1715-275d41563482, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.035530] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780644, 'name': PowerOffVM_Task, 'duration_secs': 0.551593} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.035801] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.035966] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.036238] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24346705-7091-4a2f-9d8a-d987ae5ca869 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.094664] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.094896] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.095092] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleting the datastore file [datastore2] f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.095353] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a88f3968-8888-4321-b59b-31514bd6c22f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.106448] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for the task: (returnval){ [ 961.106448] env[68244]: value = "task-2780649" [ 961.106448] env[68244]: _type = "Task" [ 961.106448] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.111141] env[68244]: DEBUG nova.scheduler.client.report [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.124302] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.127384] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780646, 'name': Rename_Task, 'duration_secs': 0.232007} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.127750] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.127998] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab8358e7-dca1-4333-91dd-f173d5f93bea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.134327] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 961.134327] env[68244]: value = "task-2780650" [ 961.134327] env[68244]: _type = "Task" [ 961.134327] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.137773] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Releasing lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.138097] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Instance network_info: |[{"id": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "address": "fa:16:3e:19:65:07", "network": {"id": "773e2ef7-0a29-4903-a18b-8f2d3c804c38", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-677243533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acbfe945ee0c4cbcbaa57b7a8d10a40a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2258be90-64", "ovs_interfaceid": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 961.139093] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:65:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2258be90-64bd-4241-81f3-2cb028b7a8cc', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.146528] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Creating folder: Project (acbfe945ee0c4cbcbaa57b7a8d10a40a). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 961.147631] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5f67863-e3d6-493c-8441-b796941928f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.153507] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780650, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.163194] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Created folder: Project (acbfe945ee0c4cbcbaa57b7a8d10a40a) in parent group-v558876. [ 961.163408] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Creating folder: Instances. Parent ref: group-v559061. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 961.163657] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33c2ca82-69b7-456d-9743-8acde961ba5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.175115] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Created folder: Instances in parent group-v559061. [ 961.175504] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.175711] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 961.175930] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7780e4b0-2ad3-4def-b544-dd627fd3b153 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.196074] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.196074] env[68244]: value = "task-2780653" [ 961.196074] env[68244]: _type = "Task" [ 961.196074] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.204429] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780653, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.286756] env[68244]: DEBUG nova.compute.manager [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Received event network-changed-2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 961.287041] env[68244]: DEBUG nova.compute.manager [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Refreshing instance network info cache due to event network-changed-2258be90-64bd-4241-81f3-2cb028b7a8cc. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 961.287227] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] Acquiring lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.287368] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] Acquired lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.287526] env[68244]: DEBUG nova.network.neutron [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Refreshing network info cache for port 2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.330564] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529feba0-8743-aac1-d145-c9d6a838881f, 'name': SearchDatastore_Task, 'duration_secs': 0.008614} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.331226] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88640ff-769a-4007-87ae-658c9b716562 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.337000] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 961.337000] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f8ea8f-f111-cb47-2e7c-a036c03d4f0c" [ 961.337000] env[68244]: _type = "Task" [ 961.337000] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.344712] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f8ea8f-f111-cb47-2e7c-a036c03d4f0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.346208] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: aa7c6967-cd55-47fc-a2f5-db6e8d2e0307] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 961.431357] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.431602] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Processing image 65d86da3-59fb-4ec7-873b-2525143225e1 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.433729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.433729] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.433729] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.433729] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee61c493-dffc-481c-8033-e3f2926094dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.444202] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.444202] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.444431] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-795d3424-8187-4c9d-947f-1708980faeff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.449817] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 961.449817] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e90a63-05d2-86f7-5122-296e4f626f81" [ 961.449817] env[68244]: _type = "Task" [ 961.449817] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.459414] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e90a63-05d2-86f7-5122-296e4f626f81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.479849] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.508775] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.509113] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.509229] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.509438] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 
tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.510295] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.510295] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.510295] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.510295] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.510295] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.510621] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.510621] env[68244]: DEBUG nova.virt.hardware [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.511469] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659399ce-03a8-445f-8486-40d78b67b6bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.520468] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ee0586-659b-4b53-8036-aa409a7b920c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.622395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.625362] env[68244]: DEBUG oslo_vmware.api [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Task: {'id': task-2780649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156309} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.627230] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.592s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.627230] env[68244]: DEBUG nova.objects.instance [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 961.632457] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.633080] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.635232] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.635232] env[68244]: INFO nova.compute.manager [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Took 1.64 seconds to destroy the instance on the hypervisor. [ 961.635232] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
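The nova.virt.hardware entries a few lines above walk from the flavor/image limits (65536 sockets, cores and threads) to a single viable topology for a 1-vCPU guest. The standalone illustration below, which is not Nova's actual implementation, shows why only (1, 1, 1) survives that enumeration:

```python
# Enumerate sockets*cores*threads combinations that exactly account for the vCPU
# count while respecting per-dimension limits (65536 each in the log above).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))  # -> [(1, 1, 1)], matching "Got 1 possible topologies"
```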
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.635649] env[68244]: DEBUG nova.compute.manager [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.635956] env[68244]: DEBUG nova.network.neutron [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.657301] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780650, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.659012] env[68244]: INFO nova.scheduler.client.report [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c [ 961.700116] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.701381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.701381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.717589] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780653, 'name': CreateVM_Task, 'duration_secs': 0.509097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.721387] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.723194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.723535] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.724015] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.725281] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52a805a3-a488-4fff-a049-cdbcbb80e602 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.732942] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 961.732942] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d1534e-ddda-36a8-13b0-b32bc3dc1338" [ 961.732942] env[68244]: _type = "Task" [ 961.732942] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.744663] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d1534e-ddda-36a8-13b0-b32bc3dc1338, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.852401] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 59b0dd89-0093-4e50-9428-8db5c7fd429d] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 961.856000] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f8ea8f-f111-cb47-2e7c-a036c03d4f0c, 'name': SearchDatastore_Task, 'duration_secs': 0.013349} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.856000] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.856000] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 961.856000] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dddcf90-45f6-4ec7-b428-6cc8b76bafc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.868025] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 961.868025] env[68244]: value = "task-2780654" [ 961.868025] env[68244]: _type = "Task" [ 961.868025] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.877431] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.963927] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 961.964575] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Fetch image to [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94/OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 961.964849] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Downloading stream optimized image 65d86da3-59fb-4ec7-873b-2525143225e1 to [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94/OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94.vmdk on the data store datastore2 as vApp {{(pid=68244) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 961.965063] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Downloading image file data 65d86da3-59fb-4ec7-873b-2525143225e1 to the ESX as VM named 'OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94' {{(pid=68244) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 962.059421] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 962.059421] env[68244]: value = "resgroup-9" [ 962.059421] env[68244]: _type = "ResourcePool" [ 962.059421] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 962.059623] env[68244]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b795f139-99a5-4d92-a0ca-9d8c7b50a38d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.085152] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease: (returnval){ [ 962.085152] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 962.085152] env[68244]: _type = "HttpNfcLease" [ 962.085152] env[68244]: } obtained for vApp import into resource pool (val){ [ 962.085152] env[68244]: value = "resgroup-9" [ 962.085152] env[68244]: _type = "ResourcePool" [ 962.085152] env[68244]: }. 
{{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 962.085404] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the lease: (returnval){ [ 962.085404] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 962.085404] env[68244]: _type = "HttpNfcLease" [ 962.085404] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 962.092989] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.092989] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 962.092989] env[68244]: _type = "HttpNfcLease" [ 962.092989] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.154017] env[68244]: DEBUG oslo_vmware.api [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780650, 'name': PowerOnVM_Task, 'duration_secs': 0.640307} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.154413] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.154718] env[68244]: DEBUG nova.compute.manager [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.155653] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b012af-1d72-4577-aedc-6dafb471ab31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.174877] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3180309-4ab1-4513-b05c-f8210787ba38 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.418s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.244765] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d1534e-ddda-36a8-13b0-b32bc3dc1338, 'name': SearchDatastore_Task, 'duration_secs': 0.02999} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.245463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.245463] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.245799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.245844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.246021] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.246442] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a74836-537f-46ac-ac80-3af3e0b607aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.256173] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.256396] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.257356] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd430bae-a711-4d81-ba80-6a3fe758f5cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.266860] env[68244]: DEBUG nova.network.neutron [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Updated VIF entry in instance network info cache for port 2258be90-64bd-4241-81f3-2cb028b7a8cc. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.266860] env[68244]: DEBUG nova.network.neutron [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Updating instance_info_cache with network_info: [{"id": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "address": "fa:16:3e:19:65:07", "network": {"id": "773e2ef7-0a29-4903-a18b-8f2d3c804c38", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-677243533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "acbfe945ee0c4cbcbaa57b7a8d10a40a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2258be90-64", "ovs_interfaceid": "2258be90-64bd-4241-81f3-2cb028b7a8cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.268156] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 962.268156] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bbd8ee-e70b-c4dd-a562-a7c8f69ce352" [ 962.268156] env[68244]: _type = "Task" [ 962.268156] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.280132] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bbd8ee-e70b-c4dd-a562-a7c8f69ce352, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.358885] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d73f87d2-41b3-4396-b5b5-932f8c6bf626] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 962.379584] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780654, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.385318] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Successfully updated port: 58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.594105] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.594105] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 962.594105] env[68244]: _type = "HttpNfcLease" [ 962.594105] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.650041] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7acb4344-9895-46cd-9a76-14c63e98559c tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.650507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.002s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.651165] env[68244]: DEBUG nova.objects.instance [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lazy-loading 'resources' on Instance uuid b0b79f25-f97d-4d59-ae80-2f8c09201073 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.680414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.743413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
962.743545] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.743752] env[68244]: DEBUG nova.network.neutron [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.769456] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa4ceb47-17b6-440c-adf3-8c97ede068a7 req-01ba4ec7-4e0e-419f-9a0e-59c1bfbc779d service nova] Releasing lock "refresh_cache-54b9144a-f84a-4be2-b6de-c61af436ec4e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.780340] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bbd8ee-e70b-c4dd-a562-a7c8f69ce352, 'name': SearchDatastore_Task, 'duration_secs': 0.021388} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.781155] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69027f13-2eb9-41b7-97a2-cd19d705d6c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.786691] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 962.786691] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526ebec8-bf45-9d75-c95f-2950df54c941" [ 962.786691] env[68244]: _type = "Task" [ 962.786691] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.797254] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526ebec8-bf45-9d75-c95f-2950df54c941, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.862623] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 03af8758-fba3-4173-b998-d9e6b3113f8c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 962.879420] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693563} completed successfully. 
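The Acquiring/Acquired/Releasing pairs above come from oslo.concurrency's named locks, which serialize work such as the per-instance "refresh_cache-<uuid>" refreshes and the shared "compute_resources" updates. A minimal sketch, assuming the standard lockutils helpers; the lock names and function bodies are placeholders:

```python
from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid):
    # The context-manager form emits the plain Acquiring / Acquired / Releasing
    # DEBUG lines seen in the log while the named lock is held.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the network info cache under the lock

@lockutils.synchronized('compute_resources')
def update_usage():
    # The decorator form produces the '"acquired" by ... waited Ns' and
    # '"released" by ... held Ns' variants that name the caller.
    pass  # resource-tracker work serialized on the shared lock
```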
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.879653] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. [ 962.880784] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d1039c-e30e-464f-aa05-08d654c2a09d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.901456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.901619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.901823] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.911105] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.914845] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27abf175-9c55-46c0-b817-79f7a970cadd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.935079] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 962.935079] env[68244]: value = "task-2780656" [ 962.935079] env[68244]: _type = "Task" [ 962.935079] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.945018] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780656, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.961225] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.963235] env[68244]: DEBUG nova.network.neutron [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.100232] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.100232] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 963.100232] env[68244]: _type = "HttpNfcLease" [ 963.100232] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 963.100556] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 963.100556] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a62d30-9e3b-4dbd-7995-925f5047d1d4" [ 963.100556] env[68244]: _type = "HttpNfcLease" [ 963.100556] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 963.101324] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00058d52-44dd-4111-864b-686cd1ca69d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.111106] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 963.111106] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk. 
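The HttpNfcLease entries above (ImportVApp, the "is initializing"/"is ready" lease polls, the discovered disk-0.vmdk URL) are vSphere's lease-based image import. A hedged sketch of that flow, assuming an existing `session` plus placeholder `rp_ref`, `folder_ref` and `import_spec`; this is an approximation, not the oslo_vmware.rw_handles implementation:

```python
from oslo_vmware import vim_util

def start_vapp_import(session, rp_ref, folder_ref, import_spec):
    # ResourcePool.ImportVApp hands back an HttpNfcLease moref...
    lease_ref = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=folder_ref)
    # ...which is polled until ready (the lease "is initializing"/"is ready" lines).
    session.wait_for_lease_ready(lease_ref)
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease_ref, 'info')
    # HttpNfcLeaseInfo.deviceUrl[0].url is the https://esx.../disk-0.vmdk endpoint
    # that the write handle then streams the stream-optimized image bytes to.
    return lease_info.deviceUrl[0].url
```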
{{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 963.177802] env[68244]: DEBUG nova.network.neutron [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Updating instance_info_cache with network_info: [{"id": "58a98d40-5d72-49a3-9bec-97e83511b260", "address": "fa:16:3e:61:20:36", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a98d40-5d", "ovs_interfaceid": "58a98d40-5d72-49a3-9bec-97e83511b260", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.183595] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d461f1e9-4c34-46b3-ba39-6b0f6383f391 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.297786] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526ebec8-bf45-9d75-c95f-2950df54c941, 'name': SearchDatastore_Task, 'duration_secs': 0.013666} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.298075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.298414] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 54b9144a-f84a-4be2-b6de-c61af436ec4e/54b9144a-f84a-4be2-b6de-c61af436ec4e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.298655] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f48f5017-7c6c-49f8-b303-04a877265063 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.305322] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 963.305322] env[68244]: value = "task-2780657" [ 963.305322] env[68244]: _type = "Task" [ 963.305322] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.313156] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780657, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.336822] env[68244]: DEBUG nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Received event network-vif-plugged-58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 963.337079] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Acquiring lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.337274] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.337455] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.337598] env[68244]: DEBUG nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] No waiting events found dispatching network-vif-plugged-58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 963.337750] env[68244]: WARNING nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Received unexpected event network-vif-plugged-58a98d40-5d72-49a3-9bec-97e83511b260 for instance with vm_state building and task_state spawning. [ 963.338585] env[68244]: DEBUG nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Received event network-changed-58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 963.338585] env[68244]: DEBUG nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Refreshing instance network info cache due to event network-changed-58a98d40-5d72-49a3-9bec-97e83511b260. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 963.338585] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Acquiring lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.367928] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 511ab0f1-4ad8-4f8a-bbad-f6d2bf1a1b7f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 963.452343] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780656, 'name': ReconfigVM_Task, 'duration_secs': 0.336228} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.453983] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Reconfigured VM instance instance-00000044 to attach disk [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.457290] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9c46ec-8275-4916-a642-47e2c34b0913 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.480272] env[68244]: INFO nova.compute.manager [-] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Took 1.84 seconds to deallocate network for instance. [ 963.495454] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd87d9d9-e950-4d3e-9182-a218b3191915 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.524708] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 963.524708] env[68244]: value = "task-2780658" [ 963.524708] env[68244]: _type = "Task" [ 963.524708] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.532297] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780658, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.550770] env[68244]: DEBUG nova.network.neutron [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.635141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "60c502f4-8c4b-433e-ad4f-9351048abe11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.635669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.680445] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.680833] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Instance network_info: |[{"id": "58a98d40-5d72-49a3-9bec-97e83511b260", "address": "fa:16:3e:61:20:36", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a98d40-5d", "ovs_interfaceid": "58a98d40-5d72-49a3-9bec-97e83511b260", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 963.681189] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Acquired lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.681392] env[68244]: DEBUG nova.network.neutron [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Refreshing network info cache for port 58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.683477] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:20:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba4f6497-e2b4-43b5-9819-6927865ae974', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a98d40-5d72-49a3-9bec-97e83511b260', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.693496] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.701107] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.701695] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b50c4f15-5dca-4dd2-9546-130067046c41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.737777] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.737777] env[68244]: value = "task-2780659" [ 963.737777] env[68244]: _type = "Task" [ 963.737777] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.765231] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780659, 'name': CreateVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.785707] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a627a8-66f5-452d-93f9-03d0ddededdc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.798576] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420db81f-c7db-4819-828f-e36bd2fe6788 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.844604] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e38ead5-7c70-4643-97c6-5ebdae987435 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.850677] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780657, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.856600] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e955f7a6-47bf-4418-9942-46ff430873ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.875372] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 3776b39a-d10b-4068-8b4b-5dc25798e088] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 963.877999] env[68244]: DEBUG nova.compute.provider_tree [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.970788] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 963.971051] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 963.971983] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5775afa-0e59-462a-8b4f-510f26d6c6ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.979739] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 963.980026] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 963.980372] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-86b747e6-1378-4d7c-8c82-912ddb214bdb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.014379] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.033281] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780658, 'name': ReconfigVM_Task, 'duration_secs': 0.198765} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.033595] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.033863] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b9ccc81-bd2d-46ab-b971-9bcc35a706e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.042205] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 964.042205] env[68244]: value = "task-2780660" [ 964.042205] env[68244]: _type = "Task" [ 964.042205] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.050356] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.057018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.140585] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 964.154693] env[68244]: DEBUG oslo_vmware.rw_handles [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52785af7-508d-9846-b0b5-55d2e7ef6c36/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 964.154869] env[68244]: INFO nova.virt.vmwareapi.images [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Downloaded image file data 65d86da3-59fb-4ec7-873b-2525143225e1 [ 964.155770] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a231cd4-972e-4ddf-a7c2-3e05b88672ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.176062] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5503c835-0a0e-4391-9d65-c175ac894ec8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.204922] env[68244]: INFO nova.virt.vmwareapi.images [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] The imported VM was unregistered [ 964.207149] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 964.207387] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1 {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.207651] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-651a4259-fb32-437d-a6db-937a813bbe36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.218802] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1 {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.218978] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94/OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94.vmdk to [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk. {{(pid=68244) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 964.219254] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e3b029f9-61c7-4047-a88d-7d467bb8effc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.229051] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 964.229051] env[68244]: value = "task-2780662" [ 964.229051] env[68244]: _type = "Task" [ 964.229051] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.237752] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.247571] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780659, 'name': CreateVM_Task, 'duration_secs': 0.3691} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.250562] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 964.250955] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.251217] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.251429] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.251634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.251829] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.253752] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.253930] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.254280] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 
tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 964.255096] env[68244]: INFO nova.compute.manager [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Terminating instance [ 964.256618] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7994c812-6ec1-48d2-a2c3-57dd2ffad290 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.264513] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 964.264513] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3d773-933b-eec0-ce8b-9575bb8e79eb" [ 964.264513] env[68244]: _type = "Task" [ 964.264513] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.275377] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3d773-933b-eec0-ce8b-9575bb8e79eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.317784] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616645} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.317784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 54b9144a-f84a-4be2-b6de-c61af436ec4e/54b9144a-f84a-4be2-b6de-c61af436ec4e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.318176] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 964.318307] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-647cf5aa-31f8-4cb9-831e-54e84c52be42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.325493] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 964.325493] env[68244]: value = "task-2780663" [ 964.325493] env[68244]: _type = "Task" [ 964.325493] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.333843] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780663, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.386606] env[68244]: DEBUG nova.scheduler.client.report [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.390309] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8b02c3c1-b5e9-41c1-a594-c2b0f0aa4a9c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 964.449524] env[68244]: DEBUG nova.network.neutron [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Updated VIF entry in instance network info cache for port 58a98d40-5d72-49a3-9bec-97e83511b260. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.449663] env[68244]: DEBUG nova.network.neutron [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Updating instance_info_cache with network_info: [{"id": "58a98d40-5d72-49a3-9bec-97e83511b260", "address": "fa:16:3e:61:20:36", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a98d40-5d", "ovs_interfaceid": "58a98d40-5d72-49a3-9bec-97e83511b260", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.556837] env[68244]: DEBUG oslo_vmware.api [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780660, 'name': PowerOnVM_Task, 'duration_secs': 0.491263} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.557019] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.560980] env[68244]: DEBUG nova.compute.manager [None req-c573429d-ed14-493b-99bc-fc358d61eef6 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.566621] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0880887-39ad-4e9e-9e08-80325575ef14 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.593900] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5265c6-9b92-4544-8fc8-aa9d7d9dd6a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.614264] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70862e0b-4ddc-4d15-a6f1-6b57f73e3184 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.623216] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 964.677018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.739915] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.764036] env[68244]: DEBUG nova.compute.manager [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.764036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.764036] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33949643-f306-4582-8aad-edb2d7d000d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.776169] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.776746] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f063d68-7dd7-438e-8a58-bfa12cf9b941 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.784143] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d3d773-933b-eec0-ce8b-9575bb8e79eb, 'name': SearchDatastore_Task, 'duration_secs': 0.013305} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.784923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.785228] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 964.785473] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.785615] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.785784] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.786133] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-571a9b21-1ece-4e51-9076-764b9e1ff899 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.790018] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 964.790018] env[68244]: value = "task-2780664" [ 964.790018] env[68244]: _type = "Task" [ 964.790018] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.798538] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.807542] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.807542] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 964.808506] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-138c70f3-b539-482e-9a37-8b983a9aa34a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.818947] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 964.818947] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e62dda-80ac-1b05-0e7f-9bf35edd8135" [ 964.818947] env[68244]: _type = "Task" [ 964.818947] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.831291] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e62dda-80ac-1b05-0e7f-9bf35edd8135, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.844048] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780663, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102681} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.844048] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 964.844465] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62689fb4-a8cf-408b-a592-fc6ccd07e177 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.873956] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 54b9144a-f84a-4be2-b6de-c61af436ec4e/54b9144a-f84a-4be2-b6de-c61af436ec4e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.874371] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c8bd591-e27f-4d56-8f07-724e46dccf30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.894461] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.896916] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d81bdefa-9c23-413b-9670-bbb2139084f7] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 964.901751] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.829s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.901944] env[68244]: DEBUG nova.objects.instance [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lazy-loading 'resources' on Instance uuid ce2c5992-690a-4ab4-8dc1-86d99f8ca647 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.903355] env[68244]: DEBUG oslo_vmware.api [None 
req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 964.903355] env[68244]: value = "task-2780665" [ 964.903355] env[68244]: _type = "Task" [ 964.903355] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.918425] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780665, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.923340] env[68244]: INFO nova.scheduler.client.report [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Deleted allocations for instance b0b79f25-f97d-4d59-ae80-2f8c09201073 [ 964.954987] env[68244]: DEBUG oslo_concurrency.lockutils [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] Releasing lock "refresh_cache-b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.956118] env[68244]: DEBUG nova.compute.manager [req-96b43760-c256-40af-b3f5-24bb9a6d794d req-2ad35b80-2499-493f-b752-e07d10baf3ba service nova] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Received event network-vif-deleted-86448281-b3d4-4132-8a5e-1a366a1132e0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 965.131507] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a590f5-1edd-4e59-b16b-5f832d791b3a tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance 'e8655168-1fe8-4590-90a3-2ad9438d7761' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 965.244107] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.305921] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780664, 'name': PowerOffVM_Task, 'duration_secs': 0.325665} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.306389] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.306551] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.306813] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-527f7f18-c671-4f90-97a7-09aece39b756 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.331094] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e62dda-80ac-1b05-0e7f-9bf35edd8135, 'name': SearchDatastore_Task, 'duration_secs': 0.086315} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.332376] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4779cf2-8887-4a83-beb2-9c11cb3a319f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.339311] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 965.339311] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526d7588-7dd0-fa9a-d27f-db4ab2cbcc34" [ 965.339311] env[68244]: _type = "Task" [ 965.339311] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.350232] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526d7588-7dd0-fa9a-d27f-db4ab2cbcc34, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.390520] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.390684] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.390874] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore1] 183ac01e-82b1-470e-9e8f-a8aefb4c64c3 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.391161] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-748ceba8-fed3-4647-8769-f6083b76615d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.404412] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 23f2ad6c-ea98-4a32-a79a-75cec6fc925e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 965.406932] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 965.406932] env[68244]: value = "task-2780667" [ 965.406932] env[68244]: _type = "Task" [ 965.406932] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.423500] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.428355] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780665, 'name': ReconfigVM_Task, 'duration_secs': 0.439275} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.429232] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 54b9144a-f84a-4be2-b6de-c61af436ec4e/54b9144a-f84a-4be2-b6de-c61af436ec4e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 965.437309] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f2a97fc-af70-4bcd-9b39-009e929de17e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.437309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51af5a87-e843-4058-8e0a-0e8eb14908b8 tempest-ListServerFiltersTestJSON-1468642307 tempest-ListServerFiltersTestJSON-1468642307-project-member] Lock "b0b79f25-f97d-4d59-ae80-2f8c09201073" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.308s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.447788] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 965.447788] env[68244]: value = "task-2780668" [ 965.447788] env[68244]: _type = "Task" [ 965.447788] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.465814] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780668, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.748284] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.858949] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526d7588-7dd0-fa9a-d27f-db4ab2cbcc34, 'name': SearchDatastore_Task, 'duration_secs': 0.102042} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.862633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.865033] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8/b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 965.865033] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f70e60d3-de2d-4a8d-bbb3-226a4a52369f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.877261] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 965.877261] env[68244]: value = "task-2780669" [ 965.877261] env[68244]: _type = "Task" [ 965.877261] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.892392] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.907973] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: aebd1200-ae52-4537-a677-24b57b581517] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 965.912120] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c2a4d8-16b4-4bd5-99c1-1d0c77f705f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.927834] env[68244]: DEBUG oslo_vmware.api [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278702} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.930639] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.930979] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.931750] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.931750] env[68244]: INFO nova.compute.manager [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Took 1.17 seconds to destroy the instance on the hypervisor. [ 965.931750] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.932176] env[68244]: DEBUG nova.compute.manager [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.932318] env[68244]: DEBUG nova.network.neutron [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.936070] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c855c1-0458-4b2a-879e-1d3d24b6730a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.989360] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2d4148-8f34-4035-bc35-19eb6e848a39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.993389] env[68244]: DEBUG nova.compute.manager [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 965.993503] env[68244]: DEBUG nova.compute.manager [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing instance network info cache due to event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 965.993675] env[68244]: DEBUG oslo_concurrency.lockutils [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.993820] env[68244]: DEBUG oslo_concurrency.lockutils [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.993977] env[68244]: DEBUG nova.network.neutron [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.001975] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4677adc1-1c22-442d-900b-ba1677749c9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.010186] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780668, 'name': Rename_Task, 'duration_secs': 0.211961} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.010936] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.011431] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbc09330-5dbe-4f42-a38f-8a46c1c5814d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.024319] env[68244]: DEBUG nova.compute.provider_tree [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.034936] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 966.034936] env[68244]: value = "task-2780670" [ 966.034936] env[68244]: _type = "Task" [ 966.034936] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.048725] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780670, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.247489] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.393137] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.418047] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: c662b964-abc9-41af-85fd-ea1a540e1e23] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 966.428698] env[68244]: DEBUG nova.compute.manager [req-38789633-197a-4386-b57c-9501cfea46fd req-1730e9c8-559d-4632-8f8e-1e758928459f service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Received event network-vif-deleted-c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 966.428880] env[68244]: INFO nova.compute.manager [req-38789633-197a-4386-b57c-9501cfea46fd req-1730e9c8-559d-4632-8f8e-1e758928459f service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Neutron deleted interface c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6; detaching it from the instance and deleting it from the info cache [ 966.429071] env[68244]: DEBUG nova.network.neutron [req-38789633-197a-4386-b57c-9501cfea46fd req-1730e9c8-559d-4632-8f8e-1e758928459f service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.527754] env[68244]: DEBUG nova.scheduler.client.report [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.548827] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780670, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.710981] env[68244]: DEBUG nova.network.neutron [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updated VIF entry in instance network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.710981] env[68244]: DEBUG nova.network.neutron [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.744818] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780662, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.510493} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.744818] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94/OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94.vmdk to [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk. 
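The DeleteDatastoreFile_Task, CopyVirtualDisk_Task and MoveVirtualDisk_Task entries above all follow one pattern: the driver submits a vCenter task, then oslo.vmware polls the task object until it reports success or error, logging "progress is N%" on each poll and "completed successfully" with the duration at the end. A minimal sketch of that polling loop, assuming a hypothetical get_task_info() callable rather than the real oslo_vmware.api internals:

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative, not the configured value

    def wait_for_task(task_ref, get_task_info, log):
        """Poll a vCenter task until it finishes (simplified illustration only).

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success', 'error'), .progress and .error fields;
        the real driver goes through oslo_vmware.api.VMwareAPISession.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                log.debug("Task %s completed successfully.", task_ref)
                return info
            if info.state == 'error':
                raise RuntimeError("Task %s failed: %s" % (task_ref, info.error))
            log.debug("Task %s progress is %s%%.", task_ref, info.progress)
            time.sleep(POLL_INTERVAL)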
[ 966.744818] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Cleaning up location [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 966.744818] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_cf8c9ca4-dca7-495f-9f0a-16d311b9bb94 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.744818] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f44112e4-9930-48ce-a6d7-5f843cb48fed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.751541] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 966.751541] env[68244]: value = "task-2780671" [ 966.751541] env[68244]: _type = "Task" [ 966.751541] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.760292] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780671, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.772823] env[68244]: DEBUG nova.network.neutron [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.894319] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780669, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.921200] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ffee3ad0-2fca-4c2c-9fa4-9218ef2e1207] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 966.931798] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3b5ef53-6a4d-426b-b403-c232df39cc21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.945204] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91b35e3-5bfc-4d0e-81bc-c2d72edbad67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.989272] env[68244]: DEBUG nova.compute.manager [req-38789633-197a-4386-b57c-9501cfea46fd req-1730e9c8-559d-4632-8f8e-1e758928459f service nova] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Detach interface failed, port_id=c7cbfafe-e9d1-4cfb-84e1-8d6d1942eae6, reason: Instance 183ac01e-82b1-470e-9e8f-a8aefb4c64c3 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 967.035592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.038300] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.653s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.040096] env[68244]: INFO nova.compute.claims [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 967.056362] env[68244]: DEBUG oslo_vmware.api [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780670, 'name': PowerOnVM_Task, 'duration_secs': 0.626414} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.056700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.056935] env[68244]: INFO nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Took 8.24 seconds to spawn the instance on the hypervisor. [ 967.057107] env[68244]: DEBUG nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.057946] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c657ac-5245-4b82-878c-e2bd84ee3392 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.061798] env[68244]: INFO nova.scheduler.client.report [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Deleted allocations for instance ce2c5992-690a-4ab4-8dc1-86d99f8ca647 [ 967.214266] env[68244]: DEBUG oslo_concurrency.lockutils [req-64e29099-866f-418f-b2d8-20622610207f req-32fad2c2-47ef-4aaf-8423-b5c355040123 service nova] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.265131] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07946} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.265131] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.265131] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.265497] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk to [datastore2] 91d45b22-7963-4615-8455-7d910a9a0fed/91d45b22-7963-4615-8455-7d910a9a0fed.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.265642] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-901dbe56-4045-4939-909c-4a1e59640753 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.273447] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 967.273447] env[68244]: value = "task-2780672" [ 967.273447] env[68244]: _type = "Task" [ 967.273447] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.275886] env[68244]: INFO nova.compute.manager [-] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Took 1.34 seconds to deallocate network for instance. [ 967.285876] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.390068] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780669, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.266592} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.390346] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8/b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.390567] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.390840] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53cf6296-4c54-4d66-a1b8-d55c34a22850 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.397272] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 967.397272] env[68244]: value = "task-2780673" [ 967.397272] env[68244]: _type = "Task" [ 967.397272] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.405848] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780673, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.426853] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 57504eac-0d7f-4fbe-b08c-6864713cca94] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 967.579264] env[68244]: DEBUG oslo_concurrency.lockutils [None req-48b2d38b-19d0-4d3d-9adf-d8fe920807f9 tempest-ImagesOneServerNegativeTestJSON-1834217457 tempest-ImagesOneServerNegativeTestJSON-1834217457-project-member] Lock "ce2c5992-690a-4ab4-8dc1-86d99f8ca647" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.133s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.582087] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.582319] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.583532] env[68244]: INFO nova.compute.manager [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Took 37.37 seconds to build instance. 
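The "Acquiring lock" / "acquired ... waited" / "released ... held" lines come from oslo.concurrency's lockutils, which Nova uses to serialise work on shared resources such as "compute_resources" or a single instance UUID; the waited/held figures are the time spent blocking on and then holding the lock. A rough sketch of the pattern, assuming the public lockutils.synchronized decorator (the lock name and function body here are illustrative, not Nova's actual code):

    from oslo_concurrency import lockutils

    # The decorator serialises calls that share a lock name; its wrapper is what
    # emits the 'acquired :: waited' / 'released :: held' debug lines seen above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(tracker_state, instance):
        # Illustrative body only; the real logic lives in
        # nova.compute.resource_tracker.ResourceTracker.instance_claim.
        tracker_state['claimed'].append(instance)
        return tracker_state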
[ 967.584798] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.585017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.585612] env[68244]: DEBUG nova.compute.manager [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Going to confirm migration 4 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 967.781962] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.787056] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.906739] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.310964} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.907016] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.907803] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9432f6c6-e5f9-4da3-a4ad-98b3cae49244 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.930472] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8/b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.930942] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 3a4e045e-8e27-45e4-9c90-8aa16298a096] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 967.936028] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfed2b01-ae13-44bd-a072-8cfb72e3362e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.956055] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 967.956055] env[68244]: value = "task-2780674" [ 967.956055] env[68244]: _type = "Task" [ 967.956055] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.966778] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780674, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.027228] env[68244]: DEBUG nova.compute.manager [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.027228] env[68244]: DEBUG nova.compute.manager [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing instance network info cache due to event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 968.027228] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.027228] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.028105] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.087419] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9f6b5210-b9f7-49ca-89f1-379c6bd1429e tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.880s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.087754] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 968.158178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.158542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.158855] env[68244]: DEBUG nova.network.neutron [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.159034] env[68244]: DEBUG nova.objects.instance [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'info_cache' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.287263] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.451831] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 47330950-506d-41c7-b564-30f46a7025a7] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 968.477938] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780674, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.584153] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25208f8a-c42c-43ca-b1ab-350ad984706b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.605517] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f98395-6013-48e4-93f2-6691890a7020 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.645847] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.650020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1bab39-36ed-4ef8-940d-4df3763a5571 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.660803] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1768097f-d005-4ef9-b0c4-0a59f8626493 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.682596] env[68244]: DEBUG nova.compute.provider_tree [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.790617] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.818959] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updated VIF entry in instance network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 968.819365] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.960998] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f48156b9-0316-4a9c-9cf0-9dd9d7a932c1] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 968.978225] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.186796] env[68244]: DEBUG nova.scheduler.client.report [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.291413] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.321935] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.322286] env[68244]: DEBUG nova.compute.manager [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 969.322540] env[68244]: DEBUG nova.compute.manager [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing instance network info cache due to event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 969.322704] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.322850] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.323027] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.428133] env[68244]: DEBUG nova.network.neutron [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": 
"3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.468170] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.468467] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances with incomplete migration {{(pid=68244) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 969.476541] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780674, 'name': ReconfigVM_Task, 'duration_secs': 1.12703} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.476871] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Reconfigured VM instance instance-00000047 to attach disk [datastore2] b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8/b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.477585] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-709b72f3-03e4-4c63-8a27-08d2337134eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.488340] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 969.488340] env[68244]: value = "task-2780675" [ 969.488340] env[68244]: _type = "Task" [ 969.488340] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.503338] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780675, 'name': Rename_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.693563] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.694184] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.701081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 9.987s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.790346] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.933833] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.934135] env[68244]: DEBUG nova.objects.instance [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'migration_context' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.003712] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780675, 'name': Rename_Task, 'duration_secs': 0.445715} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.003712] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.003712] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ecb1325-0cc8-4b6a-a417-edb595b757f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.014898] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 970.014898] env[68244]: value = "task-2780676" [ 970.014898] env[68244]: _type = "Task" [ 970.014898] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.029040] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780676, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.090978] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updated VIF entry in instance network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.091382] env[68244]: DEBUG nova.network.neutron [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.205472] env[68244]: DEBUG nova.compute.utils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 970.213459] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 970.213904] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.282734] env[68244]: DEBUG nova.policy [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e027c26875c4539ac586d74d426f049', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea4277fe2cb0423496d8f74d02b9f26a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 970.296681] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.437397] env[68244]: DEBUG nova.objects.base [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 970.438432] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0a9ce6-2806-4eba-9997-acdbeeccead5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.461691] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08a21a78-7771-437f-af3d-821dea557d04 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.467735] env[68244]: DEBUG oslo_vmware.api [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 970.467735] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52289cf1-7baa-b612-41f2-0fcd7bc7669e" [ 970.467735] env[68244]: _type = "Task" [ 970.467735] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.477753] env[68244]: DEBUG oslo_vmware.api [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52289cf1-7baa-b612-41f2-0fcd7bc7669e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.479338] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.514343] env[68244]: DEBUG nova.compute.manager [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 970.514645] env[68244]: DEBUG nova.compute.manager [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing instance network info cache due to event network-changed-9b1a2db2-4410-496c-9c02-c9af80c39755. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 970.514854] env[68244]: DEBUG oslo_concurrency.lockutils [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] Acquiring lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.529949] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780676, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.594187] env[68244]: DEBUG oslo_concurrency.lockutils [req-17b630d3-c4c9-42c0-a438-190654bd2e9c req-cd308176-7b21-44a2-abf4-f94cf3971169 service nova] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.595254] env[68244]: DEBUG oslo_concurrency.lockutils [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] Acquired lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.595964] env[68244]: DEBUG nova.network.neutron [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Refreshing network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.608734] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Successfully created port: 8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.715064] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Start building 
block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.733260] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f9b16f-bdc0-4e20-9d07-e000ef982776 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.740868] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac26d9f-2d8d-4deb-8fd5-b2eccd3e218f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.775428] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14009d1-d9d9-4198-9f01-823203bc52e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.789234] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6cba17-a5a6-4d73-9de7-b7dab92fc16e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.793510] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780672, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.122776} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.793764] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/65d86da3-59fb-4ec7-873b-2525143225e1/65d86da3-59fb-4ec7-873b-2525143225e1.vmdk to [datastore2] 91d45b22-7963-4615-8455-7d910a9a0fed/91d45b22-7963-4615-8455-7d910a9a0fed.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.795094] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ce9549-7c8e-4312-a3bc-4711a4f94551 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.805643] env[68244]: DEBUG nova.compute.provider_tree [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.830488] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 91d45b22-7963-4615-8455-7d910a9a0fed/91d45b22-7963-4615-8455-7d910a9a0fed.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.831336] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b541eef7-cea3-4436-846e-8f1948e84617 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.857370] 
env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 970.857370] env[68244]: value = "task-2780677" [ 970.857370] env[68244]: _type = "Task" [ 970.857370] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.868737] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780677, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.982021] env[68244]: DEBUG oslo_vmware.api [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52289cf1-7baa-b612-41f2-0fcd7bc7669e, 'name': SearchDatastore_Task, 'duration_secs': 0.013913} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.982021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.033293] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780676, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.309483] env[68244]: DEBUG nova.scheduler.client.report [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.330214] env[68244]: DEBUG nova.network.neutron [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updated VIF entry in instance network info cache for port 9b1a2db2-4410-496c-9c02-c9af80c39755. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 971.330592] env[68244]: DEBUG nova.network.neutron [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [{"id": "9b1a2db2-4410-496c-9c02-c9af80c39755", "address": "fa:16:3e:45:51:d1", "network": {"id": "68afdf5f-3d08-4551-920d-5f6e7adaa33e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-377481721-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c9a14e7ecf7e42d086738ccc845abeff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f499bc9-78da-46c1-9274-19edf26d31cb", "external-id": "nsx-vlan-transportzone-243", "segmentation_id": 243, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b1a2db2-44", "ovs_interfaceid": "9b1a2db2-4410-496c-9c02-c9af80c39755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.368024] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.528745] env[68244]: DEBUG oslo_vmware.api [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780676, 'name': PowerOnVM_Task, 'duration_secs': 1.358194} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.529229] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.529839] env[68244]: INFO nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Took 10.05 seconds to spawn the instance on the hypervisor. 
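
The instance_info_cache entries above are stored as a serialized network_info list, one dict per VIF with nested network/subnet/ip structures. Below is a small, stdlib-only sketch of walking that structure to pull out the fields the log keeps quoting (port id, MAC address, fixed IPs); the sample data is trimmed from the cache entry logged above, and extract_addresses is a hypothetical helper, not part of Nova.

    # network_info is a list of VIF dicts shaped like the cache entry above (trimmed).
    network_info = [{
        "id": "9b1a2db2-4410-496c-9c02-c9af80c39755",
        "address": "fa:16:3e:45:51:d1",
        "devname": "tap9b1a2db2-44",
        "type": "ovs",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.14", "type": "fixed"}],
            }],
        },
    }]

    def extract_addresses(network_info):
        # Yield (port_id, mac, fixed_ip) for every IP on every VIF.
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    yield vif["id"], vif["address"], ip["address"]

    for port_id, mac, addr in extract_addresses(network_info):
        print(port_id, mac, addr)
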
[ 971.530086] env[68244]: DEBUG nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.531017] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7d36e9-6d94-419a-91e7-3e6e5143c403 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.730984] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.758898] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.759192] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.759360] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.759543] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.759692] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.759840] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 
tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.760061] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.760241] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.760427] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.760597] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.760767] env[68244]: DEBUG nova.virt.hardware [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.761659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ade37d-4ebf-448e-995d-c9f5aae98e72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.770196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1785a16b-dc4d-44d1-89cb-bbfca7269109 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.833097] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.834075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.834075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.834075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.834292] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.836666] env[68244]: DEBUG oslo_concurrency.lockutils [req-1d430e9d-ee7b-4bb8-a53c-ff2628ca3136 req-9fd504e5-9372-4b8a-97c0-e334a8c211a9 service nova] Releasing lock "refresh_cache-d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.838042] env[68244]: INFO nova.compute.manager [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Terminating instance [ 971.871013] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780677, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.053290] env[68244]: INFO nova.compute.manager [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Took 37.19 seconds to build instance. 
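
The Acquiring/acquired/released lines above are emitted by oslo.concurrency's named-lock wrapper, which records how long each caller waited for a lock such as "compute_resources" and how long it held it. The following stdlib-only sketch reproduces that waited/held reporting pattern; report_lock and its message format are illustrative stand-ins, not the oslo.concurrency implementation.

    import contextlib
    import threading
    import time

    _locks = {}                       # lock name -> threading.Lock
    _locks_guard = threading.Lock()   # protects the registry itself

    @contextlib.contextmanager
    def report_lock(name, caller):
        # Mimic the 'acquired ... waited' / 'released ... held' log lines.
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        with lock:
            waited = time.monotonic() - start
            print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
            held_start = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_start
                print('Lock "%s" released by "%s" :: held %.3fs' % (name, caller, held))

    # usage: serialize resource claims the way the log shows for "compute_resources"
    with report_lock("compute_resources", "ResourceTracker.instance_claim"):
        pass  # do the claim/placement work while holding the lock
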
[ 972.210032] env[68244]: DEBUG nova.compute.manager [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Received event network-vif-plugged-8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 972.210032] env[68244]: DEBUG oslo_concurrency.lockutils [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] Acquiring lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.210821] env[68244]: DEBUG oslo_concurrency.lockutils [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.212444] env[68244]: DEBUG oslo_concurrency.lockutils [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.212444] env[68244]: DEBUG nova.compute.manager [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] No waiting events found dispatching network-vif-plugged-8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.212444] env[68244]: WARNING nova.compute.manager [req-0075c935-a25b-45ed-abaf-1a14f2a889e4 req-29193ea9-c87b-4bc3-908c-0a3080723d6a service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Received unexpected event network-vif-plugged-8cd0da80-092a-4e78-9a24-619080b3678d for instance with vm_state building and task_state spawning. 
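
The network-vif-plugged handling above follows a register-then-dispatch pattern: the build path registers the event names it expects before plugging the port, and the incoming Neutron notification either wakes the matching waiter or, as in the WARNING above, is reported as unexpected because nothing was registered yet. A simplified stdlib sketch of that idea follows; prepare_for_event and pop_event are hypothetical names, not the ComputeManager API.

    import threading
    from collections import defaultdict

    # instance uuid -> {event name -> threading.Event the build path waits on}
    _waiters = defaultdict(dict)
    _waiters_lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        # Called before the action that will eventually trigger the event.
        ev = threading.Event()
        with _waiters_lock:
            _waiters[instance_uuid][event_name] = ev
        return ev

    def pop_event(instance_uuid, event_name):
        # Called by the external-event handler when the notification arrives.
        with _waiters_lock:
            ev = _waiters[instance_uuid].pop(event_name, None)
        if ev is None:
            print("No waiting events found dispatching %s" % event_name)
            return False
        ev.set()
        return True

    # usage: the builder registers and waits, the notification releases it
    waiter = prepare_for_event("4fe60c1b", "network-vif-plugged-8cd0da80")
    pop_event("4fe60c1b", "network-vif-plugged-8cd0da80")
    waiter.wait(timeout=300)
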
[ 972.241168] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Successfully updated port: 8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.324227] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.627s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.327027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 12.354s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.345021] env[68244]: DEBUG nova.compute.manager [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.345280] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.346505] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2c18da-0062-450c-b3bf-6fab559a94f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.355813] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.356088] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41bc6aab-7d1a-46dc-9ce4-d49d29cafeb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.364820] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 972.364820] env[68244]: value = "task-2780678" [ 972.364820] env[68244]: _type = "Task" [ 972.364820] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.372359] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780677, 'name': ReconfigVM_Task, 'duration_secs': 1.031116} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.373046] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 91d45b22-7963-4615-8455-7d910a9a0fed/91d45b22-7963-4615-8455-7d910a9a0fed.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.377949] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa2874a6-4596-48da-9f22-4f99c0d29343 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.380812] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780678, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.386148] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 972.386148] env[68244]: value = "task-2780679" [ 972.386148] env[68244]: _type = "Task" [ 972.386148] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.395484] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780679, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.531082] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.562760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b3238cca-d080-422c-b792-6c037adb6938 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.713s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.562760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.032s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.562760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.562760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.562760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.566702] env[68244]: INFO nova.compute.manager [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Terminating instance [ 972.746496] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.746604] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquired lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.746768] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.799336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.799631] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.801146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.801146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.801146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.802346] env[68244]: INFO nova.compute.manager [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Terminating instance [ 972.831800] env[68244]: DEBUG nova.objects.instance [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 
tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lazy-loading 'migration_context' on Instance uuid d74a0d56-8656-429c-a703-fca87e07798f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.874738] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780678, 'name': PowerOffVM_Task, 'duration_secs': 0.303944} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.874996] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.875188] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.875437] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d366e38e-d9f0-4f38-b7bd-a0a667b2775e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.886081] env[68244]: INFO nova.scheduler.client.report [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocation for migration 2764fb14-ee76-4821-a9aa-cb31716b24d6 [ 972.898136] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780679, 'name': Rename_Task, 'duration_secs': 0.174451} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.898470] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.899408] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50c5fa2f-dffc-4eba-b255-3b3d02ec71a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.905513] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 972.905513] env[68244]: value = "task-2780681" [ 972.905513] env[68244]: _type = "Task" [ 972.905513] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.915415] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.945791] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.945791] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.945791] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Deleting the datastore file [datastore2] d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.945791] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecc97f81-25b2-46b4-a095-ead46b416383 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.952132] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for the task: (returnval){ [ 972.952132] env[68244]: value = "task-2780682" [ 972.952132] env[68244]: _type = "Task" [ 972.952132] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.960144] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.072036] env[68244]: DEBUG nova.compute.manager [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.072375] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.073258] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a411417b-ffac-44e8-852f-49789d84824b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.081227] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.081420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-401903fa-9619-46a4-a9e7-73d9af5c614b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.088269] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 973.088269] env[68244]: value = "task-2780683" [ 973.088269] env[68244]: _type = "Task" [ 973.088269] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.097163] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780683, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.281197] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.306509] env[68244]: DEBUG nova.compute.manager [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.306762] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.307678] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf833e7-d39e-4819-923c-fb6bb846fbca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.316345] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.316345] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-171c5dfe-cd5f-49a2-b82c-51f6a0cbc7e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.322675] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 973.322675] env[68244]: value = "task-2780684" [ 973.322675] env[68244]: _type = "Task" [ 973.322675] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.333765] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780684, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.394135] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddf8d56b-7872-4bc4-b87d-37fde980eb36 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.253s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.419313] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780681, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.435671] env[68244]: DEBUG nova.network.neutron [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Updating instance_info_cache with network_info: [{"id": "8cd0da80-092a-4e78-9a24-619080b3678d", "address": "fa:16:3e:7d:d8:f5", "network": {"id": "c1bbde04-6f70-48c3-80dd-337f42661747", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1702248020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea4277fe2cb0423496d8f74d02b9f26a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cd0da80-09", "ovs_interfaceid": "8cd0da80-092a-4e78-9a24-619080b3678d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.464804] env[68244]: DEBUG oslo_vmware.api [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Task: {'id': task-2780682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36675} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.465122] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.465343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.465623] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.465767] env[68244]: INFO nova.compute.manager [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Took 1.12 seconds to destroy the instance on the hypervisor. 
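
The destroy path traced above runs the same ordered steps each time: power the VM off, unregister it from vCenter, delete its datastore directory, and only then deallocate the instance's network. The schematic sketch below just fixes that ordering; power_off, unregister, delete_datastore_dir and deallocate_network are hypothetical callables standing in for the driver and Neutron calls seen in the log.

    def destroy_instance(vm, power_off, unregister, delete_datastore_dir,
                         deallocate_network):
        # Order matters: the datastore files are only removed once the VM
        # no longer references them, and network teardown comes last so a
        # failure here does not leave a half-deleted VM without ports.
        power_off(vm)              # VirtualMachine.PowerOffVM_Task
        unregister(vm)             # VirtualMachine.UnregisterVM
        delete_datastore_dir(vm)   # FileManager.DeleteDatastoreFile_Task
        deallocate_network(vm)     # deallocate_for_instance()

    # usage with no-op callables, just to show the call order
    noop = lambda vm: None
    destroy_instance("d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88", noop, noop, noop, noop)
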
[ 973.466163] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.466385] env[68244]: DEBUG nova.compute.manager [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.466533] env[68244]: DEBUG nova.network.neutron [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.597995] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780683, 'name': PowerOffVM_Task, 'duration_secs': 0.187955} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.598341] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.598520] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.601164] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34f9677f-dfe1-489a-8d78-019e2b4ca137 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.667700] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.668203] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.668576] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleting the datastore file [datastore2] b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.672688] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-7ef54a50-30de-4ab6-ae5d-268240170eba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.680083] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 973.680083] env[68244]: value = "task-2780686" [ 973.680083] env[68244]: _type = "Task" [ 973.680083] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.687898] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780686, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.834034] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780684, 'name': PowerOffVM_Task, 'duration_secs': 0.171226} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.834346] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.834564] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.834869] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d04084b2-e3b3-424b-9b18-776ee5a176bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.837919] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772768ae-b239-4842-9c78-348b0f32e4d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.847527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185e7c2c-c6e1-4fa9-8560-d52c65a823d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.884021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c4ed71-de3b-4baa-b326-ae75a56ad1a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.890949] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eeddaf4-bdec-43d1-a94d-74c11a0453ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.905240] env[68244]: DEBUG nova.compute.provider_tree [None 
req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.907657] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.907856] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.908046] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Deleting the datastore file [datastore1] 54b9144a-f84a-4be2-b6de-c61af436ec4e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.912022] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58465318-c3c7-4777-a114-9ecbf2278867 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.917936] env[68244]: DEBUG oslo_vmware.api [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780681, 'name': PowerOnVM_Task, 'duration_secs': 0.657518} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.919381] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.919381] env[68244]: INFO nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Took 17.83 seconds to spawn the instance on the hypervisor. [ 973.919565] env[68244]: DEBUG nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.919828] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for the task: (returnval){ [ 973.919828] env[68244]: value = "task-2780688" [ 973.919828] env[68244]: _type = "Task" [ 973.919828] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.920526] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c63bda-f238-4942-81e9-d242f2305b15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.936317] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.942215] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Releasing lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.942525] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Instance network_info: |[{"id": "8cd0da80-092a-4e78-9a24-619080b3678d", "address": "fa:16:3e:7d:d8:f5", "network": {"id": "c1bbde04-6f70-48c3-80dd-337f42661747", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1702248020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea4277fe2cb0423496d8f74d02b9f26a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cd0da80-09", "ovs_interfaceid": "8cd0da80-092a-4e78-9a24-619080b3678d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.944020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:d8:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2153f70-3d14-42ab-8bb3-be78296dd3b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cd0da80-092a-4e78-9a24-619080b3678d', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.951846] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 
tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Creating folder: Project (ea4277fe2cb0423496d8f74d02b9f26a). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.952149] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c89d4d90-d85a-48fc-837d-6f8f90794980 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.963842] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Created folder: Project (ea4277fe2cb0423496d8f74d02b9f26a) in parent group-v558876. [ 973.963842] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Creating folder: Instances. Parent ref: group-v559066. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.963842] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2adfa7e-6653-40a0-9f6f-04ab084e9c3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.974284] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Created folder: Instances in parent group-v559066. [ 973.974622] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.974768] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.974939] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7acd76c1-0103-475e-9c1a-006a354399db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.996174] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.996174] env[68244]: value = "task-2780691" [ 973.996174] env[68244]: _type = "Task" [ 973.996174] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.004459] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780691, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.189014] env[68244]: DEBUG oslo_vmware.api [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30639} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.190446] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.190446] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.190446] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.190870] env[68244]: INFO nova.compute.manager [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 974.191160] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.191466] env[68244]: DEBUG nova.compute.manager [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.191668] env[68244]: DEBUG nova.network.neutron [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.238200] env[68244]: DEBUG nova.network.neutron [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.254220] env[68244]: DEBUG nova.compute.manager [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Received event network-changed-8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 974.255953] env[68244]: DEBUG nova.compute.manager [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Refreshing instance network info cache due to event network-changed-8cd0da80-092a-4e78-9a24-619080b3678d. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 974.256274] env[68244]: DEBUG oslo_concurrency.lockutils [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] Acquiring lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.256462] env[68244]: DEBUG oslo_concurrency.lockutils [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] Acquired lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.256663] env[68244]: DEBUG nova.network.neutron [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Refreshing network info cache for port 8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.411391] env[68244]: DEBUG nova.scheduler.client.report [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.447064] env[68244]: DEBUG oslo_vmware.api [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Task: {'id': task-2780688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213695} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.450034] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.450034] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.450034] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.450034] env[68244]: INFO nova.compute.manager [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 974.450034] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.451071] env[68244]: DEBUG nova.compute.manager [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.453096] env[68244]: DEBUG nova.network.neutron [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.453960] env[68244]: INFO nova.compute.manager [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Took 47.37 seconds to build instance. [ 974.510122] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780691, 'name': CreateVM_Task, 'duration_secs': 0.357399} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.510338] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.511046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.511223] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.511620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 974.511881] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-812350b8-2d4f-49ec-9295-47c8da4ef49a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.516476] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 974.516476] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa97dd-7a40-b538-ec12-49e978be0c0a" [ 974.516476] env[68244]: _type = "Task" [ 974.516476] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.524421] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa97dd-7a40-b538-ec12-49e978be0c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.743018] env[68244]: INFO nova.compute.manager [-] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Took 1.28 seconds to deallocate network for instance. 
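The inventory record logged above for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 can be read as placement capacity: per resource class, usable capacity is roughly (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. A small illustrative sketch using the logged values (the loop and printout are only for illustration):

    # Usable capacity per resource class, derived from the inventory dict in
    # the log: capacity = (total - reserved) * allocation_ratio; max_unit is
    # the per-allocation ceiling.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 174},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity={capacity}, per-allocation cap={inv['max_unit']}")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400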
[ 974.957824] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a107ddbf-9e62-4548-a6e5-9e0fa37ac180 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.880s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.029947] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52aa97dd-7a40-b538-ec12-49e978be0c0a, 'name': SearchDatastore_Task, 'duration_secs': 0.025885} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.034890] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.035146] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.035413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.035528] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.035718] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.036394] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33ec3e31-78d3-4f51-a0af-aacd5253a0d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.050996] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.051213] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.051963] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2124ba79-1586-45cd-ab10-a98733eecfc8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.059336] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 975.059336] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4add-08ef-d9b7-ae66-139866b656ad" [ 975.059336] env[68244]: _type = "Task" [ 975.059336] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.067109] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4add-08ef-d9b7-ae66-139866b656ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.095322] env[68244]: DEBUG nova.network.neutron [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Updated VIF entry in instance network info cache for port 8cd0da80-092a-4e78-9a24-619080b3678d. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.095727] env[68244]: DEBUG nova.network.neutron [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Updating instance_info_cache with network_info: [{"id": "8cd0da80-092a-4e78-9a24-619080b3678d", "address": "fa:16:3e:7d:d8:f5", "network": {"id": "c1bbde04-6f70-48c3-80dd-337f42661747", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1702248020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea4277fe2cb0423496d8f74d02b9f26a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cd0da80-09", "ovs_interfaceid": "8cd0da80-092a-4e78-9a24-619080b3678d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.238176] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "91d45b22-7963-4615-8455-7d910a9a0fed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.238498] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.238736] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.238925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.239105] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 
tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.241376] env[68244]: INFO nova.compute.manager [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Terminating instance [ 975.249719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.371923] env[68244]: DEBUG nova.network.neutron [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.426399] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.099s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.432421] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.752s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.432650] env[68244]: DEBUG nova.objects.instance [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 975.516810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.516810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.516958] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 
tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.517197] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.517663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.519515] env[68244]: INFO nova.compute.manager [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Terminating instance [ 975.570745] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4add-08ef-d9b7-ae66-139866b656ad, 'name': SearchDatastore_Task, 'duration_secs': 0.03265} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.571564] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dd17cbe-699b-4139-bb2e-ec94e530e023 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.575380] env[68244]: DEBUG nova.network.neutron [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.577623] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 975.577623] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213391b-449d-e4ff-b985-7370faccdcf0" [ 975.577623] env[68244]: _type = "Task" [ 975.577623] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.585664] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213391b-449d-e4ff-b985-7370faccdcf0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.598528] env[68244]: DEBUG oslo_concurrency.lockutils [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] Releasing lock "refresh_cache-4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.598781] env[68244]: DEBUG nova.compute.manager [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Received event network-vif-deleted-9b1a2db2-4410-496c-9c02-c9af80c39755 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 975.598978] env[68244]: INFO nova.compute.manager [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Neutron deleted interface 9b1a2db2-4410-496c-9c02-c9af80c39755; detaching it from the instance and deleting it from the info cache [ 975.599145] env[68244]: DEBUG nova.network.neutron [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.745360] env[68244]: DEBUG nova.compute.manager [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 975.745650] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.746804] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3ed4e8-370a-4b1e-a122-3443b886e8da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.754838] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.755113] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aca77e7f-db62-4bd9-bd70-ecde70da03f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.761977] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 975.761977] env[68244]: value = "task-2780692" [ 975.761977] env[68244]: _type = "Task" [ 975.761977] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.769829] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780692, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.874266] env[68244]: INFO nova.compute.manager [-] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Took 1.68 seconds to deallocate network for instance. [ 976.017507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.017733] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.023069] env[68244]: DEBUG nova.compute.manager [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 976.023273] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.024142] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea73db9-6cf1-4670-9152-324dd7294feb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.033118] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.033390] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bda2d9e6-43ac-44af-a71b-01fed92d3f70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.041196] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 976.041196] env[68244]: value = "task-2780693" [ 976.041196] env[68244]: _type = "Task" [ 976.041196] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.050382] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.082019] env[68244]: INFO nova.compute.manager [-] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Took 1.63 seconds to deallocate network for instance. [ 976.094031] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213391b-449d-e4ff-b985-7370faccdcf0, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.094497] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.094951] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598/4fe60c1b-fbfb-4bf0-b52a-7920fa87f598.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.095361] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dba7610-53a8-4f14-8b23-94f99b2685e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.102392] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 976.102392] env[68244]: value = "task-2780694" [ 976.102392] env[68244]: _type = "Task" [ 976.102392] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.102582] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67f3d454-408a-453b-9760-5b575955e64d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.113195] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.117305] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894cf871-537f-403b-ad4f-4b60e3db13d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.156747] env[68244]: DEBUG nova.compute.manager [req-01a568ba-ca93-4fb0-b294-f15903447688 req-212825db-33d7-47ed-a5b0-b4f28b15b7f0 service nova] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Detach interface failed, port_id=9b1a2db2-4410-496c-9c02-c9af80c39755, reason: Instance d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 976.272723] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780692, 'name': PowerOffVM_Task, 'duration_secs': 0.199638} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.273051] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 976.273214] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.273476] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-886f424e-8e00-4556-81e4-6004b990dbd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.312446] env[68244]: DEBUG nova.compute.manager [req-13cb630d-0608-45b1-87fe-04f03e967663 req-349a473a-efe2-47db-927c-463f6cc83c19 service nova] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Received event network-vif-deleted-58a98d40-5d72-49a3-9bec-97e83511b260 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 976.312446] env[68244]: DEBUG nova.compute.manager [req-13cb630d-0608-45b1-87fe-04f03e967663 req-349a473a-efe2-47db-927c-463f6cc83c19 service nova] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Received event network-vif-deleted-2258be90-64bd-4241-81f3-2cb028b7a8cc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 976.344513] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.344762] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.345805] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore2] 91d45b22-7963-4615-8455-7d910a9a0fed {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.346114] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5868b14f-c149-4e58-8184-a4774eac1d59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.353389] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 976.353389] env[68244]: value = "task-2780696" [ 976.353389] env[68244]: _type = "Task" [ 976.353389] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.361402] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.381265] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.449281] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2214f24b-2af3-432b-b0a8-a744eb2bcd72 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.454467] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.440s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.454717] env[68244]: DEBUG nova.objects.instance [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lazy-loading 'resources' on Instance uuid f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.520388] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 976.551559] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780693, 'name': PowerOffVM_Task, 'duration_secs': 0.173545} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.551836] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 976.552013] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.552557] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e166ced-a99d-4408-bea6-25cca10fbba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.591428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.614972] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460586} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.615281] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598/4fe60c1b-fbfb-4bf0-b52a-7920fa87f598.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.615499] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.616686] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27dd4a1e-c5a2-42c5-a551-2f8345a0edf5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.618659] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.618856] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.619016] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] 2487689d-7a83-49d7-be78-fbb946ebef8c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.619248] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aec7f889-bfe6-4a7e-a278-810d2fae04e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.625715] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 976.625715] env[68244]: value = "task-2780698" [ 976.625715] env[68244]: _type = "Task" [ 976.625715] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.626959] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 976.626959] env[68244]: value = "task-2780699" [ 976.626959] env[68244]: _type = "Task" [ 976.626959] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.637234] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780698, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.641024] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780699, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.863286] env[68244]: DEBUG oslo_vmware.api [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256328} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.863804] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.863804] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.863804] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.864017] env[68244]: INFO nova.compute.manager [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Took 1.12 seconds to destroy the instance on the hypervisor. [ 976.864217] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.864548] env[68244]: DEBUG nova.compute.manager [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 976.864548] env[68244]: DEBUG nova.network.neutron [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.977409] env[68244]: INFO nova.compute.manager [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Swapping old allocation on dict_keys(['b885cb16-3bd4-46d8-abd9-28a1bf1058e3']) held by migration 922aca6a-3d71-4286-8137-d883b7d4f8e4 for instance [ 977.003196] env[68244]: DEBUG nova.scheduler.client.report [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Overwriting current allocation {'allocations': {'b885cb16-3bd4-46d8-abd9-28a1bf1058e3': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 101}}, 'project_id': '5bf57141eff643a8b03f3b0576678ec1', 'user_id': '8587147149b84a34bfbbd01e2bb637b1', 'consumer_generation': 1} on consumer d74a0d56-8656-429c-a703-fca87e07798f {{(pid=68244) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 977.041100] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.100422] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.100422] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquired lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.100422] env[68244]: DEBUG nova.network.neutron [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.145625] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060642} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.151447] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.152107] env[68244]: DEBUG oslo_vmware.api [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173378} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.153132] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4261de-3eb4-461a-95fc-bcc212ff0812 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.156075] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.156275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.156499] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.156727] env[68244]: INFO nova.compute.manager [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 977.157091] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.157227] env[68244]: DEBUG nova.compute.manager [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 977.157340] env[68244]: DEBUG nova.network.neutron [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.179904] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598/4fe60c1b-fbfb-4bf0-b52a-7920fa87f598.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.184068] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83d91699-6e0d-4346-92a1-f2533f3d2895 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.205476] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 977.205476] env[68244]: value = "task-2780700" [ 977.205476] env[68244]: _type = "Task" [ 977.205476] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.215208] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780700, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.390975] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a544dc11-7b6f-4823-a8fc-fb8cdb11c2a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.399465] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776a9139-4f7e-4863-a426-90129fb79ace {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.432830] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003ce6eb-0371-47ae-bd06-0b518c8641a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.440573] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaf3dd5-884b-4061-975e-8c45275f1195 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.454501] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.624812] env[68244]: DEBUG nova.network.neutron [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.715661] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780700, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.793144] env[68244]: DEBUG nova.network.neutron [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [{"id": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "address": "fa:16:3e:92:a5:cb", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ac021e-cd", "ovs_interfaceid": "c9ac021e-cd9a-4092-8f49-fd149000b0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.936533] env[68244]: DEBUG nova.network.neutron [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.976398] env[68244]: ERROR nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [req-dee29f5a-c75d-4c81-be7b-1842213a868c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dee29f5a-c75d-4c81-be7b-1842213a868c"}]} [ 977.993036] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 978.007573] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 978.007807] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.022597] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 978.041343] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 978.127171] env[68244]: INFO nova.compute.manager [-] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Took 1.26 seconds to deallocate network for instance. 
[ 978.219109] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780700, 'name': ReconfigVM_Task, 'duration_secs': 0.700435} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.219468] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598/4fe60c1b-fbfb-4bf0-b52a-7920fa87f598.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.220059] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdc31237-ee7f-4381-a5f9-b4913a3f7661 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.226340] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 978.226340] env[68244]: value = "task-2780701" [ 978.226340] env[68244]: _type = "Task" [ 978.226340] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.234054] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780701, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.295776] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Releasing lock "refresh_cache-d74a0d56-8656-429c-a703-fca87e07798f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.296297] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 978.296593] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-217cd3ca-c879-42b6-a41d-13eb11b2dc35 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.305342] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 978.305342] env[68244]: value = "task-2780702" [ 978.305342] env[68244]: _type = "Task" [ 978.305342] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.315391] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780702, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.381801] env[68244]: DEBUG nova.compute.manager [req-6fe1960b-e655-46d5-9ced-b89b38b64e17 req-f600dc56-cba3-4235-a186-1a265a9a79c6 service nova] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Received event network-vif-deleted-d5504b3b-2d7f-4cbf-85ac-8e0f4f983fa3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 978.382426] env[68244]: DEBUG nova.compute.manager [req-6fe1960b-e655-46d5-9ced-b89b38b64e17 req-f600dc56-cba3-4235-a186-1a265a9a79c6 service nova] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Received event network-vif-deleted-97fdf60d-e090-463d-ae82-229571208a74 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 978.419943] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1add844-3691-418d-ab39-ef7f76378da1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.427689] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd742584-3d4d-4f34-8b9a-9c9d2f5c4f63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.458019] env[68244]: INFO nova.compute.manager [-] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Took 1.30 seconds to deallocate network for instance. 
[ 978.460772] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244f1db8-3ca1-4111-a6c7-66eaf7b8a191 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.470685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00aa0da-910a-4bf8-a7d8-8815694c2878 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.484055] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.636550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.735893] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780701, 'name': Rename_Task, 'duration_secs': 0.14412} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.736197] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.736516] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1385234d-6ac8-493d-8c02-ffe20eb8b6ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.743145] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 978.743145] env[68244]: value = "task-2780703" [ 978.743145] env[68244]: _type = "Task" [ 978.743145] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.759851] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780703, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.814748] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780702, 'name': PowerOffVM_Task, 'duration_secs': 0.206403} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.815036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 978.815714] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:23:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='62be5865-e959-4fad-8733-6e5a5e5fb9d8',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-283340065',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.815975] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.816161] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.816360] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.816522] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 978.816686] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.816905] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 
tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.817087] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.817317] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.817447] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.817637] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.822764] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6f117fe-dea0-496a-981b-56a91c629411 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.837404] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 978.837404] env[68244]: value = "task-2780704" [ 978.837404] env[68244]: _type = "Task" [ 978.837404] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.846898] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780704, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.968462] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.005129] env[68244]: ERROR nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [req-696c33ca-5dfd-4089-9029-f4e3718e5b07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-696c33ca-5dfd-4089-9029-f4e3718e5b07"}]} [ 979.019869] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 979.032019] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 979.032337] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.042817] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 
tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 979.062101] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 979.256526] env[68244]: DEBUG oslo_vmware.api [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780703, 'name': PowerOnVM_Task, 'duration_secs': 0.438834} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.256807] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.257113] env[68244]: INFO nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Took 7.53 seconds to spawn the instance on the hypervisor. [ 979.257309] env[68244]: DEBUG nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.258083] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd59ba08-2a74-489e-ac95-e3796b0bd03e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.346681] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780704, 'name': ReconfigVM_Task, 'duration_secs': 0.128693} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.350066] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5a9830-7bbd-4cfd-9786-abe1fdf68872 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.368086] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:23:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='62be5865-e959-4fad-8733-6e5a5e5fb9d8',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-283340065',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.368264] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.368436] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.368707] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.368798] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.368950] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.369393] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.369393] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.369515] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.369680] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.369862] env[68244]: DEBUG nova.virt.hardware [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.372943] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9791c1f-881e-47fe-bc87-4ca479d4bc69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.378733] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 979.378733] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e41d67-2876-ce75-537a-b3dca2d1ba6e" [ 979.378733] env[68244]: _type = "Task" [ 979.378733] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.388573] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "b84c2c08-651a-407d-89dd-177bc5d90313" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.388810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.393711] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e41d67-2876-ce75-537a-b3dca2d1ba6e, 'name': SearchDatastore_Task, 'duration_secs': 0.007248} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.399135] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfiguring VM instance instance-0000003a to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 979.400100] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7536e762-0b49-4185-be62-01939c4bbb80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.420784] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 979.420784] env[68244]: value = "task-2780705" [ 979.420784] env[68244]: _type = "Task" [ 979.420784] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.431177] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780705, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.460014] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c98ef8-2aba-4418-aad5-64a553982a97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.467792] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8bc196-da7d-4195-9fe5-7d1136996068 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.501255] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdd6a05-f410-4b01-b9fe-2752383e17d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.509209] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b96e01-8abf-401a-a4c5-d4b33ade7dea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.523267] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.566811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a 
tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.567079] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.777192] env[68244]: INFO nova.compute.manager [None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Took 22.41 seconds to build instance. [ 979.894913] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 979.931632] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780705, 'name': ReconfigVM_Task, 'duration_secs': 0.446666} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.932532] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfigured VM instance instance-0000003a to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 979.933361] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f66f7c3-af7e-4f01-b9bf-1b9f708c6e70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.954984] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.955487] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb1bd7f6-25a0-4c07-888f-cb88a874ce42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.972479] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 979.972479] env[68244]: value = "task-2780706" [ 979.972479] env[68244]: _type = "Task" [ 979.972479] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.980272] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780706, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.046723] env[68244]: ERROR nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] [req-db531c90-0502-4ef2-8948-6fd4cd296f45] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-db531c90-0502-4ef2-8948-6fd4cd296f45"}]} [ 980.064029] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 980.069359] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 980.078315] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 980.078597] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.091427] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 980.111886] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 980.279610] env[68244]: DEBUG oslo_concurrency.lockutils 
[None req-18fc5630-ae38-4dbd-bca0-e5261b691d31 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.925s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.412095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.470066] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50979ce-86ad-49a6-91d6-83d94f8ac35b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.483116] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b504b0-99d7-416f-86da-7cd0c1cb82d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.486271] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780706, 'name': ReconfigVM_Task, 'duration_secs': 0.285584} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.486911] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] d74a0d56-8656-429c-a703-fca87e07798f/d74a0d56-8656-429c-a703-fca87e07798f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.488029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffd8add-93ea-42ca-95cb-a651d653c31a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.515371] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b3ef56-e915-4e34-8792-a244c21b4d7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.531344] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f84e3fa-8e56-4357-b301-1f42ef1984a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.536660] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404fa383-7127-4f19-9842-d1454ec818fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.555039] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-336d2b4f-782f-49f7-a01d-a5488ef1d91c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.564833] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.583582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a82b75-d28f-486b-8c55-b555159ac91f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.594132] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 980.594374] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ee2c76f-7e29-4e65-b83e-25ce50d95c88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.599627] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 980.599627] env[68244]: value = "task-2780707" [ 980.599627] env[68244]: _type = "Task" [ 980.599627] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.600435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.607777] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780707, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.723778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.724061] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.724279] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.724463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.724636] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.726697] env[68244]: INFO nova.compute.manager [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Terminating instance [ 981.102828] env[68244]: DEBUG nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 981.103108] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 104 to 105 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 981.103296] env[68244]: DEBUG nova.compute.provider_tree [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 981.116110] env[68244]: DEBUG oslo_vmware.api [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780707, 'name': PowerOnVM_Task, 'duration_secs': 0.358609} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.116526] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.231348] env[68244]: DEBUG nova.compute.manager [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Start destroying the instance on the hypervisor. 
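The inventory records above trace Placement's optimistic concurrency control: the first PUT fails with 409 placement.concurrent_update because the provider generation was stale, the client refreshes the provider's inventories, aggregates and traits, and the retried update succeeds, bumping the generation from 104 to 105. A rough sketch of that refresh-and-retry loop against the Placement HTTP API is below, using the requests library; the endpoint paths mirror the Placement API, but the endpoint, auth header and microversion shown are placeholders, not values taken from this deployment.

import requests

PLACEMENT = "http://placement.example:8778"                      # placeholder endpoint
HEADERS = {"X-Auth-Token": "<token>",                            # placeholder credentials
           "OpenStack-API-Version": "placement 1.39"}            # assumed microversion

def set_inventory(rp_uuid, inventories, max_retries=4):
    """PUT inventory for a resource provider, retrying on generation conflicts."""
    for _ in range(max_retries):
        # Re-read the provider so the request carries the current generation.
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}", headers=HEADERS)
        rp.raise_for_status()
        body = {"resource_provider_generation": rp.json()["generation"],
                "inventories": inventories}
        resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                            json=body, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()          # includes the new provider generation
        if resp.status_code == 409:     # placement.concurrent_update, as in the log
            continue                    # another writer bumped the generation; refresh and retry
        resp.raise_for_status()
    raise RuntimeError("gave up after repeated generation conflicts")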
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.231566] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.232466] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eec291d-3460-440c-a473-ab1193e6c49a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.240598] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.240882] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f590f51a-732d-460c-9c85-4d544ae2d6cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.246959] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 981.246959] env[68244]: value = "task-2780708" [ 981.246959] env[68244]: _type = "Task" [ 981.246959] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.255456] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.613027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.158s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.616057] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.941s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.617490] env[68244]: INFO nova.compute.claims [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.636122] env[68244]: INFO nova.scheduler.client.report [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Deleted allocations for instance f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f [ 981.756520] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780708, 'name': PowerOffVM_Task, 'duration_secs': 0.200956} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.757118] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.757118] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.757211] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73468d4b-abf0-477e-9f7d-877ca52e5dab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.843346] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.843680] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.843951] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Deleting the datastore file [datastore1] 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.844315] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6ee232c-503e-4d6e-8ae8-dd0966fbfb60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.851962] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for the task: (returnval){ [ 981.851962] env[68244]: value = "task-2780710" [ 981.851962] env[68244]: _type = "Task" [ 981.851962] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.859296] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780710, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.125108] env[68244]: INFO nova.compute.manager [None req-1724ee71-1eb8-492d-ae1e-857fbf719c85 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance to original state: 'active' [ 982.143671] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846d1249-72fe-45c4-b8f9-795ecfed3774 tempest-VolumesAdminNegativeTest-1310626418 tempest-VolumesAdminNegativeTest-1310626418-project-member] Lock "f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.661s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.361126] env[68244]: DEBUG oslo_vmware.api [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Task: {'id': task-2780710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128705} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.361350] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.361531] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.361718] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.361896] env[68244]: INFO nova.compute.manager [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Took 1.13 seconds to destroy the instance on the hypervisor. [ 982.362152] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
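The destroy sequence for instance 4fe60c1b above runs through a fixed order: power off the VM, unregister it from vCenter, delete its datastore directory, and only then deallocate the Neutron ports. A condensed sketch of that ordering is below; the four callables are hypothetical stand-ins for the vmops/ds_util/network helpers, injected so the snippet stays self-contained.

def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
    """Teardown order mirroring the log above; each step is an injected callable."""
    power_off()              # PowerOffVM_Task, waited on like any other vCenter task
    unregister()             # UnregisterVM removes the VM from the vCenter inventory
    delete_datastore_dir()   # FileManager.DeleteDatastoreFile_Task on the instance directory
    deallocate_network()     # finally, deallocate_for_instance() releases the Neutron ports

Keeping network teardown last matches the log, where "Took 1.13 seconds to destroy the instance on the hypervisor" precedes the deallocate_for_instance() call.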
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.362343] env[68244]: DEBUG nova.compute.manager [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.362438] env[68244]: DEBUG nova.network.neutron [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.639090] env[68244]: DEBUG nova.compute.manager [req-0746f363-cb46-4014-a657-da73e7ecd7be req-b11da36a-04aa-4d4e-bce9-bc9e7a3e47a9 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Received event network-vif-deleted-8cd0da80-092a-4e78-9a24-619080b3678d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 982.639090] env[68244]: INFO nova.compute.manager [req-0746f363-cb46-4014-a657-da73e7ecd7be req-b11da36a-04aa-4d4e-bce9-bc9e7a3e47a9 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Neutron deleted interface 8cd0da80-092a-4e78-9a24-619080b3678d; detaching it from the instance and deleting it from the info cache [ 982.639472] env[68244]: DEBUG nova.network.neutron [req-0746f363-cb46-4014-a657-da73e7ecd7be req-b11da36a-04aa-4d4e-bce9-bc9e7a3e47a9 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.053794] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49942ca7-7a73-4901-a9e5-e080563cf201 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.061741] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8be5b8-d054-475c-bfdb-7934a0f3c33b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.094697] env[68244]: DEBUG nova.network.neutron [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.097418] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fce85e-4e72-46a5-aab0-5595b0ae96be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.105192] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d915e974-27e8-4ce4-b5e3-2ab1a4c78a2e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.120389] env[68244]: DEBUG nova.compute.provider_tree [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.144030] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77d3b623-df50-4826-aeab-3e75a594c2ba {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.156118] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ff9530-c74e-4e6f-af93-7221714a08e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.195247] env[68244]: DEBUG nova.compute.manager [req-0746f363-cb46-4014-a657-da73e7ecd7be req-b11da36a-04aa-4d4e-bce9-bc9e7a3e47a9 service nova] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Detach interface failed, port_id=8cd0da80-092a-4e78-9a24-619080b3678d, reason: Instance 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 983.601187] env[68244]: INFO nova.compute.manager [-] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Took 1.24 seconds to deallocate network for instance. [ 983.623156] env[68244]: DEBUG nova.scheduler.client.report [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.782642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "d74a0d56-8656-429c-a703-fca87e07798f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.782936] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.783153] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "d74a0d56-8656-429c-a703-fca87e07798f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.783333] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.783500] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.785905] env[68244]: INFO nova.compute.manager [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Terminating instance [ 984.107801] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.129568] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.130115] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 984.133232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.346s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.133504] env[68244]: DEBUG nova.objects.instance [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lazy-loading 'resources' on Instance uuid 183ac01e-82b1-470e-9e8f-a8aefb4c64c3 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.290080] env[68244]: DEBUG nova.compute.manager [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Start destroying the instance on the hypervisor. 
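The lock traffic above, with per-instance names such as "<uuid>" and "<uuid>-events" around _clear_events and the long-held "compute_resources" lock in the resource tracker, comes from oslo.concurrency's named locks, which serialize greenthreads within the nova-compute process. A small sketch of the same pattern follows, assuming a recent oslo.concurrency; the lock-name prefix and the function bodies are placeholders.

from oslo_concurrency import lockutils

# Prefix for the lock names; the specific prefix used here is an assumption.
synchronized = lockutils.synchronized_with_prefix("nova-")

@synchronized("compute_resources")
def update_usage(resource_tracker, instance):
    """Only one greenthread at a time may mutate resource-tracker state."""
    ...

def do_terminate_instance(instance_uuid):
    # A per-instance lock name, like the "<uuid>" and "<uuid>-events" names in
    # the log, keeps build/terminate/event handling for one instance serialized.
    with lockutils.lock(instance_uuid):
        ...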
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 984.290385] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 984.291422] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ea2c60-ff76-421a-b7bc-71f404bc47d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.300348] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.300636] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-278e3b86-dc55-423b-934c-8b9d49eddf8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.308711] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 984.308711] env[68244]: value = "task-2780711" [ 984.308711] env[68244]: _type = "Task" [ 984.308711] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.316763] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780711, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.636563] env[68244]: DEBUG nova.compute.utils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 984.641426] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 984.641621] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.697100] env[68244]: DEBUG nova.policy [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 984.819473] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780711, 'name': PowerOffVM_Task, 'duration_secs': 0.187307} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.819807] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.820070] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.820346] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10e54687-85df-4c8e-8cd4-593e2b004f4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.904718] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.905202] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.905567] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleting the datastore file [datastore2] d74a0d56-8656-429c-a703-fca87e07798f {{(pid=68244) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.907791] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbd75541-5e89-41c2-b5a8-993075473fa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.911968] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 984.911968] env[68244]: value = "task-2780713" [ 984.911968] env[68244]: _type = "Task" [ 984.911968] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.923331] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780713, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.002676] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Successfully created port: 096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.108497] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de48171-7f1b-44e8-98b4-912f839bae53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.115662] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d012fe-6f87-49dd-b1db-90458f0556b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.147507] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 985.151915] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e949bfec-680c-48fc-b78e-337e85ba0942 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.164608] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3306d711-757f-4d7a-a826-6c232cfd042c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.184951] env[68244]: DEBUG nova.compute.provider_tree [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.421967] env[68244]: DEBUG oslo_vmware.api [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149132} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.422315] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.422548] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.422789] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.423117] env[68244]: INFO nova.compute.manager [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 985.423410] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
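The "Waiting for function ..._deallocate_network_with_retries to return" lines show network teardown being driven through oslo.service's looping-call machinery, so transient Neutron failures are retried rather than leaking ports. As a rough, self-contained stand-in for that idea (not the actual Nova code path), a bounded retry with exponential backoff could look like this:

import time

def deallocate_with_retries(deallocate, attempts=3, base_delay=1.0):
    """Call deallocate() until it succeeds or the attempts are exhausted.

    A simplified stand-in for the looping-call driven retry seen in the log;
    the attempt count and delays here are illustrative, not Nova's values.
    """
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise                      # give up and surface the failure
            # Exponential backoff between attempts: 1s, 2s, 4s, ...
            time.sleep(base_delay * 2 ** (attempt - 1))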
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 985.423650] env[68244]: DEBUG nova.compute.manager [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 985.423785] env[68244]: DEBUG nova.network.neutron [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.652485] env[68244]: DEBUG nova.compute.manager [req-3d635d4b-339a-4f1c-9a12-a0f00356ffa4 req-c9a3d2ae-0267-436a-bc5c-c40dcc72a5f6 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Received event network-vif-deleted-c9ac021e-cd9a-4092-8f49-fd149000b0aa {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 985.652717] env[68244]: INFO nova.compute.manager [req-3d635d4b-339a-4f1c-9a12-a0f00356ffa4 req-c9a3d2ae-0267-436a-bc5c-c40dcc72a5f6 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Neutron deleted interface c9ac021e-cd9a-4092-8f49-fd149000b0aa; detaching it from the instance and deleting it from the info cache [ 985.652876] env[68244]: DEBUG nova.network.neutron [req-3d635d4b-339a-4f1c-9a12-a0f00356ffa4 req-c9a3d2ae-0267-436a-bc5c-c40dcc72a5f6 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.688767] env[68244]: DEBUG nova.scheduler.client.report [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.138114] env[68244]: DEBUG nova.network.neutron [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.156644] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e42b4ab2-cfb9-440c-802f-272a9380b642 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.163549] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 986.172762] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424a05bb-2adf-462d-a6d7-528a946f89ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.195034] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.061s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.199478] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 986.199740] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.199918] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 986.200359] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.200420] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 986.200578] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 986.200769] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 986.201247] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 986.201247] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 986.202097] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 986.202097] env[68244]: DEBUG nova.virt.hardware [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 986.202097] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.556s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.203626] env[68244]: INFO nova.compute.claims [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 986.207723] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04bb09f-940c-4b80-ae38-5184026f5916 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.224195] env[68244]: DEBUG nova.compute.manager [req-3d635d4b-339a-4f1c-9a12-a0f00356ffa4 req-c9a3d2ae-0267-436a-bc5c-c40dcc72a5f6 service nova] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Detach interface failed, port_id=c9ac021e-cd9a-4092-8f49-fd149000b0aa, reason: Instance d74a0d56-8656-429c-a703-fca87e07798f could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 986.234402] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe992c4-1406-4801-9330-03e7bc18a5c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.239795] env[68244]: INFO nova.scheduler.client.report [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted allocations for instance 183ac01e-82b1-470e-9e8f-a8aefb4c64c3 [ 986.513793] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Successfully updated port: 096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.641429] env[68244]: INFO nova.compute.manager [-] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Took 1.22 seconds to deallocate network for instance. [ 986.759576] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19f1bf73-6a11-4462-834c-fb39f661f5ce tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "183ac01e-82b1-470e-9e8f-a8aefb4c64c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.507s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.016789] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.016789] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.016789] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.150192] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.539477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.540301] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.540634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.540903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.541168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.543474] env[68244]: INFO nova.compute.manager [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Terminating instance [ 987.554245] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.681218] env[68244]: DEBUG nova.compute.manager [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Received event network-vif-plugged-096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 987.681437] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Acquiring lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.681653] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.681835] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.682011] env[68244]: DEBUG nova.compute.manager [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] No waiting events found dispatching network-vif-plugged-096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.682319] env[68244]: WARNING nova.compute.manager [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Received unexpected event network-vif-plugged-096eaf97-e1ef-4622-b4ab-5300ed10d060 for instance with vm_state building and task_state spawning. [ 987.682485] env[68244]: DEBUG nova.compute.manager [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Received event network-changed-096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 987.682639] env[68244]: DEBUG nova.compute.manager [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Refreshing instance network info cache due to event network-changed-096eaf97-e1ef-4622-b4ab-5300ed10d060. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 987.682808] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Acquiring lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.704204] env[68244]: DEBUG nova.network.neutron [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Updating instance_info_cache with network_info: [{"id": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "address": "fa:16:3e:57:fc:d8", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096eaf97-e1", "ovs_interfaceid": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.728925] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df666570-32d6-4ba9-b079-6bcd13748cbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.737338] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51678096-7997-45f1-9ed8-8843b0807a18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.771708] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c798421-55a3-46a1-9e4c-aaeef3ebf966 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.781029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30061e50-29e2-4c93-9b3f-1fe562b466ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.793596] env[68244]: DEBUG nova.compute.provider_tree [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.051039] env[68244]: DEBUG nova.compute.manager [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 
tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.051039] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.051039] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab4fd70-9dfd-40bb-be78-9b0efe569fb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.059690] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.059690] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02c317f6-f234-4276-bd02-83e5a23e875f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.067191] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 988.067191] env[68244]: value = "task-2780714" [ 988.067191] env[68244]: _type = "Task" [ 988.067191] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.074075] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.207428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.207800] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Instance network_info: |[{"id": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "address": "fa:16:3e:57:fc:d8", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096eaf97-e1", "ovs_interfaceid": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 988.208098] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Acquired lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.208293] env[68244]: DEBUG nova.network.neutron [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Refreshing network info cache for port 096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.209779] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:fc:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '096eaf97-e1ef-4622-b4ab-5300ed10d060', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.219156] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.220202] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.220433] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0661e115-358d-4f57-886e-d4c6fea929af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.243613] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.243613] env[68244]: value = "task-2780715" [ 988.243613] env[68244]: _type = "Task" [ 988.243613] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.251924] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780715, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.297382] env[68244]: DEBUG nova.scheduler.client.report [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.574804] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780714, 'name': PowerOffVM_Task, 'duration_secs': 0.207586} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.575125] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.575315] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.575558] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9a318f-eae7-4e28-9fe0-8a6690d5c96e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.641067] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.641298] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.641480] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore2] 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.641751] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f38709a-9ddd-46be-b291-e0af08f3c0a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.648405] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 988.648405] env[68244]: value = "task-2780717" [ 988.648405] env[68244]: _type = "Task" [ 988.648405] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.656473] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.755537] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780715, 'name': CreateVM_Task, 'duration_secs': 0.313278} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.757784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.758363] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.758524] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.758878] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 988.759584] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-439ad9f4-58ab-4446-87e1-c54d394eb928 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.764228] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 988.764228] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9ffdd-432d-7676-ad22-6831d8ec6eb2" [ 988.764228] env[68244]: _type = "Task" [ 988.764228] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.772872] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9ffdd-432d-7676-ad22-6831d8ec6eb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.803562] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.803975] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 988.806532] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.827s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.933085] env[68244]: DEBUG nova.network.neutron [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Updated VIF entry in instance network info cache for port 096eaf97-e1ef-4622-b4ab-5300ed10d060. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.933430] env[68244]: DEBUG nova.network.neutron [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Updating instance_info_cache with network_info: [{"id": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "address": "fa:16:3e:57:fc:d8", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096eaf97-e1", "ovs_interfaceid": "096eaf97-e1ef-4622-b4ab-5300ed10d060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.165024] env[68244]: DEBUG oslo_vmware.api [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131861} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.165024] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.165024] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.165024] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.165298] env[68244]: INFO nova.compute.manager [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Took 1.12 seconds to destroy the instance on the hypervisor. [ 989.169020] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.169020] env[68244]: DEBUG nova.compute.manager [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.169020] env[68244]: DEBUG nova.network.neutron [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.275515] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9ffdd-432d-7676-ad22-6831d8ec6eb2, 'name': SearchDatastore_Task, 'duration_secs': 0.011714} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.276185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.276632] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.277237] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.277438] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.277681] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.277985] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efb36d87-7893-4f2e-b0c1-299d1614cd72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.289355] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.289622] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.290443] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc48206-74aa-4c44-81e4-00c380fd2937 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.297905] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 989.297905] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2c6cd-da0a-0392-a1b4-2d81291abcf9" [ 989.297905] env[68244]: _type = "Task" [ 989.297905] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.305766] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2c6cd-da0a-0392-a1b4-2d81291abcf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.310080] env[68244]: DEBUG nova.compute.utils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 989.311452] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 989.436785] env[68244]: DEBUG oslo_concurrency.lockutils [req-7d2c06df-776b-47e4-8ded-61d867f301f2 req-1dffb418-1688-416b-b7f6-7b6580a604ee service nova] Releasing lock "refresh_cache-60c502f4-8c4b-433e-ad4f-9351048abe11" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.712295] env[68244]: DEBUG nova.compute.manager [req-3136452e-fc82-4e02-bc7a-4656d0154d61 req-f630d40b-ef80-48e9-8a79-b43f485ce3a8 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Received event network-vif-deleted-26966576-ec16-40c4-b057-eb88b817f439 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 989.713084] env[68244]: INFO nova.compute.manager [req-3136452e-fc82-4e02-bc7a-4656d0154d61 req-f630d40b-ef80-48e9-8a79-b43f485ce3a8 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Neutron deleted interface 26966576-ec16-40c4-b057-eb88b817f439; detaching it from the instance and deleting it from the info cache [ 989.713084] env[68244]: DEBUG nova.network.neutron [req-3136452e-fc82-4e02-bc7a-4656d0154d61 req-f630d40b-ef80-48e9-8a79-b43f485ce3a8 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.757569] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a901d5-60d4-4dec-8f6b-7442857acccd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.767029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee77741-1350-45df-b320-02983694009d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.812295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ae9d12-90f4-4597-a0a1-55590cba1810 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.815165] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 989.827805] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b2c6cd-da0a-0392-a1b4-2d81291abcf9, 'name': SearchDatastore_Task, 'duration_secs': 0.010413} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.828583] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138b76cd-d899-468b-9d54-9d0fc869b24c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.835344] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e628ab6-1d44-438c-b991-219ef3693309 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.847645] env[68244]: DEBUG nova.compute.provider_tree [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.850928] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 989.850928] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d853c9-06e0-ff25-efe6-6af32543477a" [ 989.850928] env[68244]: _type = "Task" [ 989.850928] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.859891] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d853c9-06e0-ff25-efe6-6af32543477a, 'name': SearchDatastore_Task, 'duration_secs': 0.009883} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.863838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.863838] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 60c502f4-8c4b-433e-ad4f-9351048abe11/60c502f4-8c4b-433e-ad4f-9351048abe11.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.863838] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd522222-b141-422b-97cf-b059a0daf2c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.869313] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 989.869313] env[68244]: value = "task-2780718" [ 989.869313] env[68244]: _type = "Task" [ 989.869313] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.879086] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.923747] env[68244]: DEBUG nova.network.neutron [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.219773] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d0d8d6d-c6e5-48fa-b439-0cd3537c516b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.234443] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf7ebe0-fdb8-4709-bd83-7617e57c2297 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.286031] env[68244]: DEBUG nova.compute.manager [req-3136452e-fc82-4e02-bc7a-4656d0154d61 req-f630d40b-ef80-48e9-8a79-b43f485ce3a8 service nova] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Detach interface failed, port_id=26966576-ec16-40c4-b057-eb88b817f439, reason: Instance 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 990.352871] env[68244]: DEBUG nova.scheduler.client.report [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.381922] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457085} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.382196] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 60c502f4-8c4b-433e-ad4f-9351048abe11/60c502f4-8c4b-433e-ad4f-9351048abe11.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.382407] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.382691] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8b4a6b4-c209-46b3-86b6-1d936a740a73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.388951] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 990.388951] env[68244]: value = "task-2780719" [ 990.388951] env[68244]: _type = "Task" [ 990.388951] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.397727] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.428049] env[68244]: INFO nova.compute.manager [-] [instance: 45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Took 1.26 seconds to deallocate network for instance. 
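The PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task and SearchDatastore_Task entries above all follow the same oslo.vmware client pattern: invoke an asynchronous vSphere task method, then block in wait_for_task(), which polls the task object and emits the "Waiting for the task", "progress is N%." and "completed successfully" DEBUG lines. The sketch below is a rough, hedged illustration of that pattern (it is not Nova's code); the vCenter host, credentials and retry/poll settings are placeholders, and only the instance UUID is taken from the log entries above.

```python
# Rough sketch of the oslo.vmware task-polling pattern seen in the log above.
# Host, credentials and retry/poll settings are placeholders, not this
# deployment's configuration.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',   # placeholder endpoint/creds
    api_retry_count=10, task_poll_interval=0.5)

# Locate the VM by instance UUID via the SearchIndex, mirroring the
# "Invoking SearchIndex.FindAllByUuid" entries (UUID taken from the log).
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='45ec526b-e9d8-4ea3-b0c8-af6da39b0158',
    vmSearch=True, instanceUuid=True)

# Kick off the asynchronous power-off and block until the task completes;
# wait_for_task() is what produces the "progress is 0%." and
# "completed successfully" DEBUG lines above.
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
session.wait_for_task(task_ref)
```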
[ 990.826064] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 990.849464] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.849464] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.849464] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.849464] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.849464] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 990.849774] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.849825] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.849957] env[68244]: DEBUG nova.virt.hardware [None 
req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.850139] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.850299] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.850469] env[68244]: DEBUG nova.virt.hardware [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.851345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e41ab-4e08-42e2-a0d3-a06f65be5535 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.863931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28770269-0eac-4dc6-bf6f-cd8020a01a11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.883867] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.889442] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Creating folder: Project (ca4941e8533f4d6b950d22c2be9afa68). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 990.890485] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb87400a-f080-464a-b906-9a8b468fe7b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.900217] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060713} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.900469] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.901221] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca14bf2-7de0-4bc4-8337-f52d38f9cde6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.904836] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Created folder: Project (ca4941e8533f4d6b950d22c2be9afa68) in parent group-v558876. [ 990.905043] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Creating folder: Instances. Parent ref: group-v559070. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 990.905545] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-310d29f1-3d07-4630-b294-fadd49bec1d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.926955] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 60c502f4-8c4b-433e-ad4f-9351048abe11/60c502f4-8c4b-433e-ad4f-9351048abe11.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.928471] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daaca3a2-e772-4a17-be73-5c02f081402b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.943508] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Created folder: Instances in parent group-v559070. [ 990.943770] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.944745] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.945295] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.946553] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-200a9da1-30ce-4e60-a88d-2cf53fc60a64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.959663] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 990.959663] env[68244]: value = "task-2780722" [ 990.959663] env[68244]: _type = "Task" [ 990.959663] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.964226] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.964226] env[68244]: value = "task-2780723" [ 990.964226] env[68244]: _type = "Task" [ 990.964226] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.970844] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.975744] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780723, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.374586] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.567s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.374586] env[68244]: DEBUG nova.compute.manager [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=68244) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 991.377320] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.128s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.377613] env[68244]: DEBUG nova.objects.instance [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lazy-loading 'resources' on Instance uuid d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.471370] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780722, 'name': ReconfigVM_Task, 'duration_secs': 0.305866} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.472147] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 60c502f4-8c4b-433e-ad4f-9351048abe11/60c502f4-8c4b-433e-ad4f-9351048abe11.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.472827] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc84cec0-d9bf-4314-a72f-b8a070cc21e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.477157] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780723, 'name': CreateVM_Task, 'duration_secs': 0.310599} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.477711] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 991.478213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.478449] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.478871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 991.479165] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16cae6d1-f54a-42c3-933c-5acaec3b4f49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.481949] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 991.481949] env[68244]: value = "task-2780724" [ 991.481949] env[68244]: _type = "Task" [ 991.481949] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.486765] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 991.486765] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7ed41-5ce8-bfbc-72d6-65cbed5daab6" [ 991.486765] env[68244]: _type = "Task" [ 991.486765] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.492965] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780724, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.499188] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7ed41-5ce8-bfbc-72d6-65cbed5daab6, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.499522] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.499839] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.500618] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.500618] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.500769] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.501123] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b062fe5-16df-4938-930a-d1c5c26fca80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.508579] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.508786] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.509561] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7d1c524-3f38-4671-82b8-84d20564b686 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.515186] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 991.515186] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f9db2d-0d99-8b1b-e6de-9fd447454f0d" [ 991.515186] env[68244]: _type = "Task" [ 991.515186] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.524604] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f9db2d-0d99-8b1b-e6de-9fd447454f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.007236} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.525443] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaeeec0f-63c0-4a07-9e2b-c2d159674f0c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.530473] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 991.530473] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4c74f-853b-3cdd-ede0-d10ee8740260" [ 991.530473] env[68244]: _type = "Task" [ 991.530473] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.538267] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4c74f-853b-3cdd-ede0-d10ee8740260, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.757938] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.758228] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.935785] env[68244]: INFO nova.scheduler.client.report [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted allocation for migration cf3eda49-2122-4c27-a9c6-168c1bbcf3b9 [ 991.992710] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780724, 'name': Rename_Task, 'duration_secs': 0.143617} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.995241] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.996147] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56def9dd-ae66-48e5-be47-3260c0be3aa8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.003129] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 992.003129] env[68244]: value = "task-2780725" [ 992.003129] env[68244]: _type = "Task" [ 992.003129] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.015061] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780725, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.042843] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4c74f-853b-3cdd-ede0-d10ee8740260, 'name': SearchDatastore_Task, 'duration_secs': 0.007992} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.045358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.045594] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.046506] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c668e5f9-163e-483a-9556-01e51f68d83d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.052709] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 992.052709] env[68244]: value = "task-2780726" [ 992.052709] env[68244]: _type = "Task" [ 992.052709] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.063988] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.262025] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 992.270177] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd4b69b-b3b3-41c0-875c-bfa6d977f112 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.278396] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47069d85-f4ff-48df-9e00-6590f6e83021 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.312662] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bc9237-88f7-4f62-bcb6-561bd40752b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.324910] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce0b2cf-e88c-48f2-9dab-72c4ef6ebf61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.340685] env[68244]: DEBUG nova.compute.provider_tree [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.443454] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6cfad52e-f2ae-48d0-abe8-eb1f9ad6e320 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.858s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.514472] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780725, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.564391] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780726, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.779952] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.819851] env[68244]: DEBUG nova.objects.instance [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.843466] env[68244]: DEBUG nova.scheduler.client.report [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.014334] env[68244]: DEBUG oslo_vmware.api [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780725, 'name': PowerOnVM_Task, 'duration_secs': 0.744805} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.014658] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.014908] env[68244]: INFO nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Took 6.85 seconds to spawn the instance on the hypervisor. [ 993.015141] env[68244]: DEBUG nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.015920] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e98d2c-9940-4cf9-9897-3f178a083e55 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.063580] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513869} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.063795] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 993.063997] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 993.064249] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1526142-a7e8-44bf-a946-546d0e5394a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.070853] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 993.070853] env[68244]: value = "task-2780727" [ 993.070853] env[68244]: _type = "Task" [ 993.070853] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.078319] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780727, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.327658] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.327844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.328030] env[68244]: DEBUG nova.network.neutron [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.328213] env[68244]: DEBUG nova.objects.instance [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'info_cache' on Instance uuid e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.347922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.350584] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.970s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.350807] env[68244]: DEBUG nova.objects.instance [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'resources' on Instance uuid b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.370805] env[68244]: INFO nova.scheduler.client.report [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Deleted allocations for instance d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88 [ 993.535603] env[68244]: INFO nova.compute.manager [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Took 28.89 seconds to build instance. 
[ 993.580834] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067642} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.581079] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.581944] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d38f24-5e62-4fef-bcea-b78179e7c77d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.602782] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.603106] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-206dc12f-3116-4a08-9fa5-5851cda68634 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.623232] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 993.623232] env[68244]: value = "task-2780728" [ 993.623232] env[68244]: _type = "Task" [ 993.623232] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.631118] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780728, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.831952] env[68244]: DEBUG nova.objects.base [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 993.877436] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9d188376-4439-42ca-a616-6fc66b3c4e89 tempest-ServerRescueTestJSONUnderV235-1794094085 tempest-ServerRescueTestJSONUnderV235-1794094085-project-member] Lock "d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.044s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.039626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d3b3e5c-9723-4388-a8f5-a2f265a42faa tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.404s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.133709] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780728, 'name': ReconfigVM_Task, 'duration_secs': 0.286739} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.136353] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Reconfigured VM instance instance-0000004a to attach disk [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.137235] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-936bd34a-2bf6-440e-99ac-2f8a67dd7e60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.143990] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 994.143990] env[68244]: value = "task-2780729" [ 994.143990] env[68244]: _type = "Task" [ 994.143990] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.153041] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780729, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.204836] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00af796b-79af-44df-9995-b3a81e042364 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.212490] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5439111b-485e-42e6-ad94-8bee6a19366a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.244079] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b674a73a-6aa0-44fe-8f8e-b601498d2a13 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.252192] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6209c5ce-f7c0-41d7-8868-b5ea58955943 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.266469] env[68244]: DEBUG nova.compute.provider_tree [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.570537] env[68244]: DEBUG nova.network.neutron [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [{"id": "3a4a896b-0463-43a3-8487-d50328142090", "address": "fa:16:3e:2d:95:05", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a4a896b-04", "ovs_interfaceid": "3a4a896b-0463-43a3-8487-d50328142090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 994.655979] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780729, 'name': Rename_Task, 'duration_secs': 0.172103} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.656179] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.656433] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0085f38d-adae-406f-89f3-380f064f9a59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.663071] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 994.663071] env[68244]: value = "task-2780730" [ 994.663071] env[68244]: _type = "Task" [ 994.663071] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.674352] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780730, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.790545] env[68244]: ERROR nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [req-e6e76458-f2e4-4140-aa07-0d0d9dded9e5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e6e76458-f2e4-4140-aa07-0d0d9dded9e5"}]} [ 994.809226] env[68244]: DEBUG nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 994.826884] env[68244]: DEBUG nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 994.827211] env[68244]: DEBUG nova.compute.provider_tree [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.839762] env[68244]: DEBUG nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 994.862259] env[68244]: DEBUG nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 995.057810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.057942] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.073430] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-e8655168-1fe8-4590-90a3-2ad9438d7761" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.179876] env[68244]: DEBUG oslo_vmware.api [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780730, 'name': PowerOnVM_Task, 'duration_secs': 0.445672} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.180576] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.180828] env[68244]: INFO nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Took 4.35 seconds to spawn the instance on the hypervisor. 
[ 995.181058] env[68244]: DEBUG nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.182359] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf00125c-e364-4673-977e-ab99fab4d113 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.317034] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2306a4-4ebf-4370-99d2-21dd0cffc124 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.324403] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504a7cce-3cbb-4929-85c3-bf3a5aee8af3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.355755] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29418a52-2382-46d8-9e9a-00190b3a2a3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.363809] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea08d1bd-5601-4f7a-a846-74bb088077c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.380537] env[68244]: DEBUG nova.compute.provider_tree [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 995.562909] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.703573] env[68244]: INFO nova.compute.manager [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Took 27.11 seconds to build instance. 
[ 995.926956] env[68244]: DEBUG nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 995.927790] env[68244]: DEBUG nova.compute.provider_tree [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 108 to 109 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 995.930017] env[68244]: DEBUG nova.compute.provider_tree [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.085119] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.085436] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5ad85e7-c594-4820-8a3b-c72cee266c34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.093213] env[68244]: DEBUG oslo_vmware.api [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 996.093213] env[68244]: value = "task-2780731" [ 996.093213] env[68244]: _type = "Task" [ 996.093213] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.101214] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.106924] env[68244]: DEBUG oslo_vmware.api [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.205764] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d3c964ad-3c83-4e57-99ab-04132103e93d tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.623s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.439920] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.089s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.442303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.852s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.442534] env[68244]: DEBUG nova.objects.instance [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lazy-loading 'resources' on Instance uuid 54b9144a-f84a-4be2-b6de-c61af436ec4e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.463644] env[68244]: INFO nova.scheduler.client.report [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted allocations for instance b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8 [ 996.611598] env[68244]: DEBUG oslo_vmware.api [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780731, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.970462] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cedeb121-f7f1-44b2-b03d-17839bd3d0cc tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.409s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.107824] env[68244]: INFO nova.compute.manager [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Rebuilding instance [ 997.110562] env[68244]: DEBUG oslo_vmware.api [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780731, 'name': PowerOnVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.182050] env[68244]: DEBUG nova.compute.manager [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.183174] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce59e927-5d36-4876-bd3b-f2ea5c5ab749 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.407627] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a5f8c2-41c0-4e12-a6de-a43fe6ec497d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.416848] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e4a48c-9275-4c38-bd81-401f62647268 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.454825] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f49350-3824-4757-985d-1f20ca1e6c6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.462685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ad7d17-19c9-4ad9-9ac5-85724452832f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.477192] env[68244]: DEBUG nova.compute.provider_tree [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.607181] env[68244]: DEBUG oslo_vmware.api [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780731, 'name': PowerOnVM_Task, 'duration_secs': 1.104488} 
completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.607658] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.607999] env[68244]: DEBUG nova.compute.manager [None req-8453cb61-5a4f-4baf-870d-33fb14f41470 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.608938] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65066618-8f66-4179-8121-701b9e632225 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.982301] env[68244]: DEBUG nova.scheduler.client.report [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.205418] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.205755] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fedd608-c773-41df-957c-cb6e5b3cf293 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.212949] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 998.212949] env[68244]: value = "task-2780732" [ 998.212949] env[68244]: _type = "Task" [ 998.212949] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.224186] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780732, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.489871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.493650] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.453s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.497238] env[68244]: INFO nova.compute.claims [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.523856] env[68244]: INFO nova.scheduler.client.report [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Deleted allocations for instance 54b9144a-f84a-4be2-b6de-c61af436ec4e [ 998.726481] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780732, 'name': PowerOffVM_Task, 'duration_secs': 0.240851} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.726770] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.727693] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.730643] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c884615-b76a-4de8-af5d-3e3b9ee839f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.739475] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.739731] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea3dccbf-7c4d-405e-8a53-072adb2a6f0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.765388] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.765587] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.765771] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Deleting the datastore file [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.766056] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-463e820f-33c4-4eae-adfe-3f1c9d74204d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.774768] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 998.774768] env[68244]: value = "task-2780734" [ 998.774768] env[68244]: _type = "Task" [ 998.774768] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.782792] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.961702] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.961992] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.962221] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.962405] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.962574] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.964761] env[68244]: INFO nova.compute.manager [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Terminating instance [ 999.040066] env[68244]: DEBUG oslo_concurrency.lockutils [None req-33df87f9-9e69-49c3-9ea7-c067f72ad7cf tempest-ServerMetadataTestJSON-1497518947 tempest-ServerMetadataTestJSON-1497518947-project-member] Lock "54b9144a-f84a-4be2-b6de-c61af436ec4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.240s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.290374] env[68244]: DEBUG 
oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392264} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.291392] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.292149] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.292489] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.469337] env[68244]: DEBUG nova.compute.manager [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.469551] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.472348] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856e5efd-c079-4bbf-83b0-c7f637b17cbb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.481849] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.482636] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e819197-f288-41f4-a679-a7cbdfbc9ce5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.490910] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 999.490910] env[68244]: value = "task-2780735" [ 999.490910] env[68244]: _type = "Task" [ 999.490910] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.506110] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780735, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.819955] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "828865d7-d06a-4683-9149-987e6d9efbd9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.820273] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.823098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.823098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.823098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.828022] env[68244]: INFO nova.compute.manager [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Terminating instance [ 999.987395] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fb5975-c676-4040-802a-fcd5ccf9ca38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.998038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f6644f-5325-40a1-9169-73f5a7b89f51 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.005107] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780735, 'name': PowerOffVM_Task, 'duration_secs': 0.210335} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.005776] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.006093] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.006351] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee55ae4f-5485-4d1a-adc0-a54df9b933b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.039662] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eb32b8-ce32-418f-8680-48d9dc4765ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.047346] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fd6922-4a75-4aa5-b65f-b22c60279231 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.061482] env[68244]: DEBUG nova.compute.provider_tree [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.100884] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.101241] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.101513] env[68244]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleting the datastore file [datastore2] e8655168-1fe8-4590-90a3-2ad9438d7761 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.102116] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58dc32fa-4485-4f94-88f1-696cc2a29fdd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.110578] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1000.110578] env[68244]: value = "task-2780737" [ 1000.110578] env[68244]: _type = "Task" [ 1000.110578] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.117568] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.331384] env[68244]: DEBUG nova.compute.manager [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.331659] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.331942] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e867bd52-e616-4491-88ce-d486ca225393 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.340910] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 1000.340910] env[68244]: value = "task-2780738" [ 1000.340910] env[68244]: _type = "Task" [ 1000.340910] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.350700] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.350965] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.351179] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.351379] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.351545] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.351699] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.351919] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.352090] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.352258] env[68244]: 
DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.352441] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.352619] env[68244]: DEBUG nova.virt.hardware [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.353925] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c96b1ef-9b39-483e-be4e-11c888fc5215 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.363889] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.368141] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5508601c-2337-4c2c-aa8e-cff3154ab68b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.383162] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.388879] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.389338] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.390417] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13688a2c-fbf7-4d90-9b4d-4fe3e03222f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.408196] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.408196] env[68244]: value = "task-2780739" [ 1000.408196] env[68244]: _type = "Task" [ 1000.408196] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.416332] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780739, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.574605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.574929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.588919] env[68244]: ERROR nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [req-0d727321-457c-4206-9410-f2171ccbb16b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0d727321-457c-4206-9410-f2171ccbb16b"}]} [ 1000.611022] env[68244]: DEBUG nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1000.628343] env[68244]: DEBUG oslo_vmware.api [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217807} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.628343] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.628343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.628343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.628343] env[68244]: INFO nova.compute.manager [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1000.628343] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.628343] env[68244]: DEBUG nova.compute.manager [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1000.628343] env[68244]: DEBUG nova.network.neutron [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1000.631043] env[68244]: DEBUG nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1000.631043] env[68244]: DEBUG nova.compute.provider_tree [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.655150] env[68244]: DEBUG nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1000.684062] env[68244]: DEBUG nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1000.855995] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780738, 'name': PowerOffVM_Task, 'duration_secs': 0.234492} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.856173] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.856268] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1000.857052] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558946', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'name': 'volume-fbca3648-be41-4048-bbb7-c27ab5f4f106', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '828865d7-d06a-4683-9149-987e6d9efbd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'serial': 'fbca3648-be41-4048-bbb7-c27ab5f4f106'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1000.859878] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325908d7-57d9-4f1e-9349-9ea5f49c7b88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.883811] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d672df-9d0b-43fc-9288-660ded203ba3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.891598] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc8df77-a820-4e13-a386-71f4b529e922 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.921912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c2c7d2-efcc-409d-ada7-fa10ae60325d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.931045] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780739, 'name': CreateVM_Task, 'duration_secs': 0.290677} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.942633] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.943050] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] The volume has not been displaced from its original location: [datastore2] volume-fbca3648-be41-4048-bbb7-c27ab5f4f106/volume-fbca3648-be41-4048-bbb7-c27ab5f4f106.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1000.949406] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1000.952495] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.952671] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.953026] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1000.953301] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bea3d272-72a2-4c5c-8d97-7b244af33ee5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.967926] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f17c476-b1fc-4fc4-b77b-b93fb90d6064 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.973618] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1000.973618] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52316012-416d-6fa4-03ee-cd8be1d55931" [ 1000.973618] env[68244]: _type = "Task" [ 1000.973618] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.978645] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 1000.978645] env[68244]: value = "task-2780740" [ 1000.978645] env[68244]: _type = "Task" [ 1000.978645] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.985058] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52316012-416d-6fa4-03ee-cd8be1d55931, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.994836] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.079941] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1001.254200] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ec9e0b-8eb2-4f78-8689-ee1a59427a98 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.262281] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8778ec64-16b4-474d-a058-44faa318589e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.269124] env[68244]: DEBUG nova.compute.manager [req-fe7298b7-6743-4c0b-80ac-2d59f17b6ec5 req-d1d0e98d-4893-4687-a0a8-5f2877c87f45 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Received event network-vif-deleted-3a4a896b-0463-43a3-8487-d50328142090 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1001.269124] env[68244]: INFO nova.compute.manager [req-fe7298b7-6743-4c0b-80ac-2d59f17b6ec5 req-d1d0e98d-4893-4687-a0a8-5f2877c87f45 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Neutron deleted interface 3a4a896b-0463-43a3-8487-d50328142090; detaching it from the instance and deleting it from the info cache [ 1001.269224] env[68244]: DEBUG nova.network.neutron [req-fe7298b7-6743-4c0b-80ac-2d59f17b6ec5 req-d1d0e98d-4893-4687-a0a8-5f2877c87f45 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.298081] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a8e2d2-6530-45b0-9189-30ce7eddacbd {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.307627] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bdac37-229b-4f70-bfe4-95262af5f9db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.323298] env[68244]: DEBUG nova.compute.provider_tree [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.486868] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52316012-416d-6fa4-03ee-cd8be1d55931, 'name': SearchDatastore_Task, 'duration_secs': 0.009077} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.487633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.487898] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.488164] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.488389] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.488596] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.488932] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f40365e-a9ff-456e-974c-5a5fd08059e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.494007] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780740, 'name': ReconfigVM_Task, 'duration_secs': 0.15571} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.494707] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1001.502367] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7881fec-057c-4c74-868c-096bec75075c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.512847] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.513075] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.513746] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a6f7ff-88ba-420b-9be9-3e93d59c34c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.519551] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1001.519551] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b6809-9d03-c45b-14b0-cabae2b4a311" [ 1001.519551] env[68244]: _type = "Task" [ 1001.519551] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.523968] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 1001.523968] env[68244]: value = "task-2780741" [ 1001.523968] env[68244]: _type = "Task" [ 1001.523968] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.530368] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b6809-9d03-c45b-14b0-cabae2b4a311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.535719] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780741, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.605641] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.708899] env[68244]: DEBUG nova.network.neutron [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.771608] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-717943df-bec4-4238-ba1d-b0c0826631da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.780903] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f4c73b-3ce5-4037-a551-0f40451438a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.814701] env[68244]: DEBUG nova.compute.manager [req-fe7298b7-6743-4c0b-80ac-2d59f17b6ec5 req-d1d0e98d-4893-4687-a0a8-5f2877c87f45 service nova] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Detach interface failed, port_id=3a4a896b-0463-43a3-8487-d50328142090, reason: Instance e8655168-1fe8-4590-90a3-2ad9438d7761 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1001.873148] env[68244]: DEBUG nova.scheduler.client.report [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1001.873438] env[68244]: DEBUG nova.compute.provider_tree [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 110 to 111 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1001.873619] env[68244]: DEBUG nova.compute.provider_tree [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1002.033146] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526b6809-9d03-c45b-14b0-cabae2b4a311, 'name': SearchDatastore_Task, 'duration_secs': 0.010878} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.034304] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22dff71f-68ba-40be-8a33-3d75f9d87540 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.040144] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780741, 'name': ReconfigVM_Task, 'duration_secs': 0.295615} completed successfully. 
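Annotation: the inventory dict logged by set_inventory_for_provider maps onto Placement's PUT /resource_providers/{uuid}/inventories call, guarded by the provider generation (110 here, bumped to 111 on success). A hedged sketch of that request; the endpoint and token are placeholders, not values from this deployment:

    import requests

    PLACEMENT = 'http://placement.example/placement'    # placeholder endpoint
    PROVIDER = 'b885cb16-3bd4-46d8-abd9-28a1bf1058e3'    # provider uuid from this log

    payload = {
        # current generation; Placement rejects the write (409) if it is stale
        'resource_provider_generation': 110,
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                     'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175,
                        'step_size': 1, 'allocation_ratio': 1.0},
        },
    }
    resp = requests.put(f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
                        json=payload,
                        headers={'X-Auth-Token': 'placeholder-token'})
    resp.raise_for_status()   # success responses carry the bumped generation (111)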
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.040895] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-558946', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'name': 'volume-fbca3648-be41-4048-bbb7-c27ab5f4f106', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '828865d7-d06a-4683-9149-987e6d9efbd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbca3648-be41-4048-bbb7-c27ab5f4f106', 'serial': 'fbca3648-be41-4048-bbb7-c27ab5f4f106'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1002.041435] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.042305] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1a6a38-59d6-461b-881e-35db3a5f5fc7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.047737] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1002.047737] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52af4000-9c60-da23-3cb0-ccef2d7447a2" [ 1002.047737] env[68244]: _type = "Task" [ 1002.047737] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.053449] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.054030] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08a7986a-ef92-42a0-916d-1617c48fe4a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.060215] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52af4000-9c60-da23-3cb0-ccef2d7447a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.145668] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.145965] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.146096] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Deleting the datastore file [datastore2] 828865d7-d06a-4683-9149-987e6d9efbd9 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.146372] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8577ef52-d5f0-487c-bfe7-8829f4ee877b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.154445] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for the task: (returnval){ [ 1002.154445] env[68244]: value = "task-2780743" [ 1002.154445] env[68244]: _type = "Task" [ 1002.154445] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.163354] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.212244] env[68244]: INFO nova.compute.manager [-] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Took 1.58 seconds to deallocate network for instance. [ 1002.379624] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.886s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.380201] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Start building networks asynchronously for instance. 
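Annotation: the "Acquiring lock / acquired / released ... held N.NNNs" triples around "compute_resources" are emitted by oslo.concurrency's lockutils wrapper. The pattern in its simplest form (function and argument names here are illustrative, not Nova's):

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, flavor):
        """Runs with the 'compute_resources' lock held.

        Concurrent claims and usage updates are serialized here, which is why
        the log shows waits such as 'waited 23.747s' when several builds and
        teardowns land on the host at once.
        """


    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section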
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1002.386653] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.747s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.386886] env[68244]: DEBUG nova.objects.instance [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lazy-loading 'resources' on Instance uuid 91d45b22-7963-4615-8455-7d910a9a0fed {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.559156] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52af4000-9c60-da23-3cb0-ccef2d7447a2, 'name': SearchDatastore_Task, 'duration_secs': 0.029545} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.559427] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.559686] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.559943] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7db231e0-0374-4a7a-b107-0d94024c1164 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.567474] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1002.567474] env[68244]: value = "task-2780744" [ 1002.567474] env[68244]: _type = "Task" [ 1002.567474] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.576841] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780744, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.664082] env[68244]: DEBUG oslo_vmware.api [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Task: {'id': task-2780743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160422} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.664740] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.664740] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.664740] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.664916] env[68244]: INFO nova.compute.manager [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Took 2.33 seconds to destroy the instance on the hypervisor. [ 1002.665200] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1002.665398] env[68244]: DEBUG nova.compute.manager [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1002.665614] env[68244]: DEBUG nova.network.neutron [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.720346] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.890654] env[68244]: DEBUG nova.compute.utils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.899635] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1002.899635] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.017062] env[68244]: DEBUG nova.policy [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e2b78ca269843a0a5541e44727d807b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf55a7bfa5948d1837855650c1c960b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.079672] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780744, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447619} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.082912] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.083367] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.083934] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dfcfec9-6502-4b58-9916-c4a13724cbd4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.091220] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1003.091220] env[68244]: value = "task-2780745" [ 1003.091220] env[68244]: _type = "Task" [ 1003.091220] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.102290] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780745, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.378211] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e05a9e8-e1f6-4a00-88bb-2fd4950c85eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.387073] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f33ebf9-5dd4-457f-b322-0c4e859b8b95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.423580] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1003.427786] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45105611-84fe-41e3-bb92-209c5cbfa1bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.437022] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a012a2-e751-4b35-8dcf-1d1318f0ad6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.453559] env[68244]: DEBUG nova.compute.provider_tree [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.605370] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780745, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069019} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.605370] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.605370] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67babd73-a321-496c-a78c-448133367f89 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.629724] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.630117] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c571cbe-2cd8-4410-bc25-95bd577abdf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.655806] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1003.655806] env[68244]: value = "task-2780746" [ 1003.655806] env[68244]: _type = "Task" [ 1003.655806] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.665275] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780746, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.747460] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Successfully created port: c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.956346] env[68244]: DEBUG nova.scheduler.client.report [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.974684] env[68244]: DEBUG nova.compute.manager [req-2a933560-22eb-439c-ae63-061a753e9867 req-7d6ba3f3-a07e-4f68-b6af-249f4e7b1ab3 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Received event network-vif-deleted-e14972f2-13d5-417d-9c9b-9a0f731e4e44 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1003.977633] env[68244]: INFO nova.compute.manager [req-2a933560-22eb-439c-ae63-061a753e9867 req-7d6ba3f3-a07e-4f68-b6af-249f4e7b1ab3 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Neutron deleted interface e14972f2-13d5-417d-9c9b-9a0f731e4e44; detaching it from the instance and deleting it from the info cache [ 1003.977633] env[68244]: DEBUG nova.network.neutron [req-2a933560-22eb-439c-ae63-061a753e9867 req-7d6ba3f3-a07e-4f68-b6af-249f4e7b1ab3 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.168674] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780746, 'name': ReconfigVM_Task, 'duration_secs': 0.381726} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.169152] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Reconfigured VM instance instance-0000004a to attach disk [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c/aa5a373e-b34a-4f94-912b-0c7d20fc5b6c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.170035] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42e1347b-590b-4511-bd20-31c8b695d21e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.178132] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1004.178132] env[68244]: value = "task-2780747" [ 1004.178132] env[68244]: _type = "Task" [ 1004.178132] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.189258] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780747, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.434474] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1004.447836] env[68244]: DEBUG nova.network.neutron [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.463255] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.468457] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.500s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.469758] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.472472] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.060s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.473980] env[68244]: INFO nova.compute.claims [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.480927] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ed6bfd1-552e-4500-856b-1a6c13143d07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.489033] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.489609] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.491296] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.491296] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.491296] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.491296] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.491296] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.491886] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.492139] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.492359] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.492575] env[68244]: DEBUG nova.virt.hardware [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.494038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a95735c-f196-4363-9fef-c800a3db8e8d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.503100] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0411281d-04b8-40f1-b900-5c159d0c08ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.520099] env[68244]: INFO nova.scheduler.client.report [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted allocations for instance 91d45b22-7963-4615-8455-7d910a9a0fed [ 1004.524764] env[68244]: INFO nova.scheduler.client.report [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocations for instance 2487689d-7a83-49d7-be78-fbb946ebef8c [ 1004.533670] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70972de4-6767-433b-8e2b-d8be9ad8da80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.565771] env[68244]: DEBUG nova.compute.manager [req-2a933560-22eb-439c-ae63-061a753e9867 req-7d6ba3f3-a07e-4f68-b6af-249f4e7b1ab3 service nova] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Detach interface failed, port_id=e14972f2-13d5-417d-9c9b-9a0f731e4e44, reason: Instance 828865d7-d06a-4683-9149-987e6d9efbd9 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1004.688884] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780747, 'name': Rename_Task, 'duration_secs': 0.134747} completed successfully. 
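Annotation: the hardware.py lines above ("limits were sockets=65536, cores=65536, threads=65536", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") amount to enumerating every (sockets, cores, threads) split whose product equals the flavor's vCPU count within those limits, then sorting by preference. A toy enumeration, not Nova's exact algorithm:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) split whose product equals
        # the vCPU count and that respects the per-dimension maximums.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies


    print(possible_topologies(1))   # [(1, 1, 1)] -- one topology, as logged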
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.689196] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.689462] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b4f2caa-0df7-4e32-a79a-f2b7df835279 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.696239] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1004.696239] env[68244]: value = "task-2780748" [ 1004.696239] env[68244]: _type = "Task" [ 1004.696239] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.706377] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.950600] env[68244]: INFO nova.compute.manager [-] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Took 2.28 seconds to deallocate network for instance. [ 1005.043946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9dcf0e43-3002-4dd7-be91-40e0eb1e0847 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "91d45b22-7963-4615-8455-7d910a9a0fed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.805s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.045586] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c4527b3-3711-4862-88a8-518a21937ce4 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "2487689d-7a83-49d7-be78-fbb946ebef8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.528s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.209611] env[68244]: DEBUG oslo_vmware.api [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780748, 'name': PowerOnVM_Task, 'duration_secs': 0.493044} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.210234] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.210638] env[68244]: DEBUG nova.compute.manager [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.211696] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98d7f50-be4d-4558-838e-9ba8c16a08a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.511355] env[68244]: INFO nova.compute.manager [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Took 0.56 seconds to detach 1 volumes for instance. [ 1005.516285] env[68244]: DEBUG nova.compute.manager [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Deleting volume: fbca3648-be41-4048-bbb7-c27ab5f4f106 {{(pid=68244) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1005.573925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.574371] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.574682] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.575012] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1005.575286] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.579939] env[68244]: INFO nova.compute.manager [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Terminating instance [ 1005.688510] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Successfully updated port: c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.732191] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.919267] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c909f4-0152-4387-98f9-53c9aca97c5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.930341] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d466ce6-2914-4f3d-a417-544b8f27a8ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.968634] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f743839-6d2e-4c0f-8776-5d822ec86893 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.975664] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051986c7-ec34-468f-9f91-53b8a7e9d83b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.991982] env[68244]: DEBUG nova.compute.provider_tree [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.078336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.084115] env[68244]: DEBUG nova.compute.manager [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 
tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1006.084338] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1006.085230] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca7f4bf-15aa-431d-a227-a9152ebc7104 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.093243] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.093583] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-413488b8-52b9-4e90-92ec-6208c315f03d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.099749] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1006.099749] env[68244]: value = "task-2780750" [ 1006.099749] env[68244]: _type = "Task" [ 1006.099749] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.107163] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780750, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.146971] env[68244]: DEBUG nova.compute.manager [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Received event network-vif-plugged-c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1006.147322] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.147513] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.147745] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.147939] env[68244]: DEBUG nova.compute.manager [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] No waiting events found dispatching network-vif-plugged-c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.148202] env[68244]: WARNING nova.compute.manager [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Received unexpected event network-vif-plugged-c5c56d0b-9541-4af9-9b67-3e468da9557f for instance with vm_state building and task_state spawning. [ 1006.148589] env[68244]: DEBUG nova.compute.manager [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Received event network-changed-c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1006.149079] env[68244]: DEBUG nova.compute.manager [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Refreshing instance network info cache due to event network-changed-c5c56d0b-9541-4af9-9b67-3e468da9557f. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1006.149422] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Acquiring lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.149553] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Acquired lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.149813] env[68244]: DEBUG nova.network.neutron [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Refreshing network info cache for port c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1006.190402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.301945] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.302244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.495075] env[68244]: DEBUG nova.scheduler.client.report [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.610495] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780750, 'name': PowerOffVM_Task, 'duration_secs': 0.196829} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.610796] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1006.611018] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1006.611306] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bed4d98-7bcd-432c-bc5d-75cb870a3922 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.675414] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1006.675754] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1006.675884] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore2] c73d39d9-1fb7-4ce7-8d60-9243bd6f519f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.676615] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02cd3945-f1c9-4b7d-8be8-57acdbfbabbc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.684557] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1006.684557] env[68244]: value = "task-2780752" [ 1006.684557] env[68244]: _type = "Task" [ 1006.684557] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.688249] env[68244]: DEBUG nova.network.neutron [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.694510] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780752, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.781410] env[68244]: DEBUG nova.network.neutron [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.807328] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1006.811756] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.812178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.812806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.812806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.812946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.816055] env[68244]: INFO nova.compute.manager [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Terminating instance [ 1006.881889] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 
tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.881889] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.000574] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.001215] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.003812] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.403s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.005220] env[68244]: INFO nova.compute.claims [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.196140] env[68244]: DEBUG oslo_vmware.api [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143068} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.196416] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.196600] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1007.197096] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1007.197328] env[68244]: INFO nova.compute.manager [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1007.197681] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.197933] env[68244]: DEBUG nova.compute.manager [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1007.198072] env[68244]: DEBUG nova.network.neutron [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1007.286453] env[68244]: DEBUG oslo_concurrency.lockutils [req-af2c4347-b58f-4f52-9c06-af9060175973 req-f62c1e5e-2211-44b3-ab70-bbf0842c03ce service nova] Releasing lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.286849] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.287053] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.320858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 
tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "refresh_cache-aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.321060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquired lock "refresh_cache-aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.322159] env[68244]: DEBUG nova.network.neutron [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.335483] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.383438] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1007.509834] env[68244]: DEBUG nova.compute.utils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.514026] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.514026] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1007.565009] env[68244]: DEBUG nova.policy [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28af72bbea7d4a829a5494e62fb6675d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd580b3bcca3e4a8b9367a20d3d2b5e3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1007.836473] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.865436] env[68244]: DEBUG nova.network.neutron [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1007.911862] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.944739] env[68244]: DEBUG nova.network.neutron [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.966906] env[68244]: DEBUG nova.network.neutron [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.015792] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.098256] env[68244]: DEBUG nova.network.neutron [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [{"id": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "address": "fa:16:3e:cb:96:5a", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5c56d0b-95", "ovs_interfaceid": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.109226] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Successfully created port: 5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1008.241121] env[68244]: DEBUG nova.compute.manager [req-9a21ed0d-c604-4228-bca5-1f2b18b091db req-3e193b6f-6582-45a4-b1f8-579f64ad3b28 service nova] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Received event network-vif-deleted-aec282ae-c918-4d46-993a-8beba0b62926 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1008.430359] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc006443-7d4a-4a09-928c-723744d2105e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.437991] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49d2711-33bb-463e-b2c7-fec0f5432528 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.468536] env[68244]: INFO nova.compute.manager [-] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Took 1.27 seconds to deallocate network for instance. 
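
[annotation, not part of the captured log] The recurring 'Acquiring lock ... by "..."' / 'Lock "..." acquired ... :: waited' / ':: held' DEBUG triplets in this trace are emitted by oslo.concurrency's lockutils wrappers, at the inner/lock call sites the entries themselves cite (lockutils.py:405/410/424 and 313/316/334). The following is a minimal illustrative sketch of that pattern only, assuming stock oslo.concurrency; the lock names and guarded functions are placeholders, not code from this deployment.

    # Illustrative sketch only; names below are placeholders, not taken from this log.
    from oslo_concurrency import lockutils


    @lockutils.synchronized("instance-uuid-events")   # decorator form -> the "inner" call sites
    def clear_events():
        """Runs with the named lock held; waited/held durations are logged at DEBUG."""
        pass


    def refresh_cache(instance_uuid):
        # Context-manager form -> the bare 'Acquiring lock'/'Releasing lock' call sites.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild the network info cache while holding the lock

The decorator form appears to correspond to the entries citing lockutils.py:405-424 ('acquired by "nova.compute.manager..."'), while the context-manager form corresponds to the entries citing lockutils.py:313-334 (e.g. the refresh_cache-* locks).
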
[ 1008.469222] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Releasing lock "refresh_cache-aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.469602] env[68244]: DEBUG nova.compute.manager [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.469792] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.472013] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9422d77e-8cf3-48d9-970a-85b2c0da641d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.477516] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0104d96-c762-430b-a5a0-16c6ce37abd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.487174] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.487824] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cbef3a7-69ae-4820-b2bf-c273766fb6eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.490104] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32be48e-1fe7-46a8-afe4-016f2219200c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.505729] env[68244]: DEBUG nova.compute.provider_tree [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.509044] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1008.509044] env[68244]: value = "task-2780753" [ 1008.509044] env[68244]: _type = "Task" [ 1008.509044] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.519267] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.605654] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.606073] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance network_info: |[{"id": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "address": "fa:16:3e:cb:96:5a", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5c56d0b-95", "ovs_interfaceid": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1008.606519] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:96:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5c56d0b-9541-4af9-9b67-3e468da9557f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1008.614758] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.615014] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1008.615248] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-154917d7-f39c-471f-830f-56d13a7ec081 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.635294] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1008.635294] env[68244]: value = "task-2780754" [ 1008.635294] env[68244]: _type = "Task" [ 1008.635294] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.643167] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780754, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.983036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.010484] env[68244]: DEBUG nova.scheduler.client.report [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.024344] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780753, 'name': PowerOffVM_Task, 'duration_secs': 0.129105} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.024566] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.024734] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.024984] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2915f97b-bfd1-4366-94f5-bbda25278d30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.027869] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.052177] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.052428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.052611] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Deleting the datastore file [datastore2] aa5a373e-b34a-4f94-912b-0c7d20fc5b6c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.052899] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59519a74-a719-43e6-95f2-8f2576830cd0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.057452] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.057770] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.057930] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.058126] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.058272] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.058418] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.058619] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.058794] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.058967] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.059157] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.059329] env[68244]: DEBUG nova.virt.hardware [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.060138] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd243501-2fa3-46c1-b7cf-04bfc912ee2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.063995] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for the task: (returnval){ [ 1009.063995] env[68244]: value = "task-2780756" [ 1009.063995] env[68244]: _type = "Task" [ 1009.063995] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.071215] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888d6873-e2ee-42be-9963-e2c96db89874 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.077462] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780756, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.145296] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780754, 'name': CreateVM_Task, 'duration_secs': 0.375971} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.145583] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1009.146232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.146386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.146779] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1009.146974] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c089f240-a8de-40e6-8c8a-4197b55a37fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.153079] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1009.153079] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7ea2e-3e96-92b0-5306-6f99a1e5d8a9" [ 1009.153079] env[68244]: _type = "Task" [ 1009.153079] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.162457] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7ea2e-3e96-92b0-5306-6f99a1e5d8a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.518821] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.515s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.519409] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1009.522325] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.415s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.523532] env[68244]: DEBUG nova.objects.instance [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lazy-loading 'resources' on Instance uuid 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.577025] env[68244]: DEBUG oslo_vmware.api [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Task: {'id': task-2780756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099566} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.577292] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.577476] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.578449] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.578449] env[68244]: INFO nova.compute.manager [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Took 1.11 seconds to destroy the instance on the hypervisor. 
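
[annotation, not part of the captured log] The task-style vSphere calls in this trace (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task) all follow the same oslo.vmware pattern: invoke the task method through the API session, then poll the returned Task object until it completes. Below is a minimal illustrative sketch of that pattern, assuming a stock oslo.vmware VMwareAPISession; the function name and vm_ref are placeholders and this is not the nova.virt.vmwareapi code itself.

    # Illustrative sketch of the invoke_api + wait_for_task pattern behind the
    # "Invoking VirtualMachine.PowerOffVM_Task ..." and "Task: {...} progress is N%" lines.
    from oslo_vmware import api as vmware_api


    def power_off_vm(session: vmware_api.VMwareAPISession, vm_ref):
        """Start the vSphere PowerOffVM_Task and block until it finishes."""
        # Sends the SOAP request; oslo.vmware logs "Invoking VirtualMachine.PowerOffVM_Task".
        task_ref = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # Polls the Task managed object on the session's poll interval and returns
        # once vSphere reports success, raising on task error.
        return session.wait_for_task(task_ref)

The 'Waiting for the task: (returnval){ value = "task-..." }' and 'progress is N% ... completed successfully' entries above correspond to wait_for_task and its polling loop (the oslo_vmware/api.py:397/434/444 call sites cited in these lines).
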
[ 1009.578449] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.578449] env[68244]: DEBUG nova.compute.manager [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.578449] env[68244]: DEBUG nova.network.neutron [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.596577] env[68244]: DEBUG nova.network.neutron [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.608180] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Successfully updated port: 5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1009.663682] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7ea2e-3e96-92b0-5306-6f99a1e5d8a9, 'name': SearchDatastore_Task, 'duration_secs': 0.010359} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.663995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.664256] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.664486] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.664634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.664811] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.665128] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a25e48f-0bbb-4c1e-aa44-b05159997da4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.673338] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.673442] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.674084] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07ad77c-8c8e-4114-b0dd-18c609f87a21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.679135] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1009.679135] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b54e9e-4cfe-71e2-0671-78fea2408f22" [ 1009.679135] env[68244]: _type = "Task" [ 1009.679135] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.686695] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b54e9e-4cfe-71e2-0671-78fea2408f22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.025837] env[68244]: DEBUG nova.compute.utils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1010.030450] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1010.030645] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1010.081720] env[68244]: DEBUG nova.policy [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28af72bbea7d4a829a5494e62fb6675d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd580b3bcca3e4a8b9367a20d3d2b5e3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1010.101323] env[68244]: DEBUG nova.network.neutron [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.111237] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.111237] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.111237] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.196757] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b54e9e-4cfe-71e2-0671-78fea2408f22, 'name': SearchDatastore_Task, 'duration_secs': 0.008355} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.197705] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99966fdd-d88f-4ca4-9393-068204a47330 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.207331] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1010.207331] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5221e631-2d94-4348-c2ea-220ff8811be4" [ 1010.207331] env[68244]: _type = "Task" [ 1010.207331] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.217972] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5221e631-2d94-4348-c2ea-220ff8811be4, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.220833] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.221244] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1010.221656] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb8299b7-7fc4-44cd-b3c3-53707600fc8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.228511] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1010.228511] env[68244]: value = "task-2780757" [ 1010.228511] env[68244]: _type = "Task" [ 1010.228511] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.239186] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780757, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.378479] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Successfully created port: 8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.456085] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba343e6b-713f-4a4b-a83a-94667f26aac9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.467272] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f61230-78d3-4d38-97cf-ed4b8396de58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.502872] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba91110-8d7e-46a7-91df-9052e4f58b73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.506782] env[68244]: DEBUG nova.compute.manager [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Received event network-vif-plugged-5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1010.507043] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Acquiring lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.507373] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.507481] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.507695] env[68244]: DEBUG nova.compute.manager [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] No waiting events found dispatching network-vif-plugged-5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.507867] env[68244]: WARNING nova.compute.manager [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Received unexpected event network-vif-plugged-5da84ae5-3ae6-4d70-b9c2-8281d992dec1 
for instance with vm_state building and task_state spawning. [ 1010.508038] env[68244]: DEBUG nova.compute.manager [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Received event network-changed-5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1010.508201] env[68244]: DEBUG nova.compute.manager [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Refreshing instance network info cache due to event network-changed-5da84ae5-3ae6-4d70-b9c2-8281d992dec1. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1010.508370] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Acquiring lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.516901] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268db8db-93c6-4429-8f16-ffa8004bf4af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.536979] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1010.540042] env[68244]: DEBUG nova.compute.provider_tree [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.606547] env[68244]: INFO nova.compute.manager [-] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Took 1.03 seconds to deallocate network for instance. 
[ 1010.674905] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.677870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.677870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.677870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.677870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.679534] env[68244]: INFO nova.compute.manager [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Terminating instance [ 1010.741022] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457449} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.741022] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.741022] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.741022] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b73c14a0-2d44-4fcf-9d41-4feb5ecf106a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.745789] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1010.745789] env[68244]: value = "task-2780758" [ 1010.745789] env[68244]: _type = "Task" [ 1010.745789] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.754897] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780758, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.842492] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.987730] env[68244]: DEBUG nova.network.neutron [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Updating instance_info_cache with network_info: [{"id": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "address": "fa:16:3e:8e:64:4b", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da84ae5-3a", "ovs_interfaceid": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.047492] env[68244]: DEBUG nova.scheduler.client.report [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.119639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.184125] env[68244]: DEBUG nova.compute.manager [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1011.184378] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.185359] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3b5e04-fac9-4d5c-9dcc-998d7addf432 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.193269] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.193469] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71bee6c4-02c5-49c4-b2eb-9240a1419e16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.196435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.196681] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.196851] env[68244]: INFO nova.compute.manager [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Shelving [ 1011.202117] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 1011.202117] env[68244]: value = "task-2780759" [ 1011.202117] env[68244]: _type = "Task" [ 1011.202117] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.210875] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780759, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.255567] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062562} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.255830] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.256593] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9146a6c0-b804-4fff-8ce6-7d876fd6ba5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.280329] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.280916] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a06c97cf-2195-4295-9d3b-8d366ac720ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.301973] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1011.301973] env[68244]: value = "task-2780760" [ 1011.301973] env[68244]: _type = "Task" [ 1011.301973] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.309993] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780760, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.492377] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.492729] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Instance network_info: |[{"id": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "address": "fa:16:3e:8e:64:4b", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da84ae5-3a", "ovs_interfaceid": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1011.493040] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Acquired lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.493231] env[68244]: DEBUG nova.network.neutron [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Refreshing network info cache for port 5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.494926] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:64:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5da84ae5-3ae6-4d70-b9c2-8281d992dec1', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.501830] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] 
Creating folder: Project (d580b3bcca3e4a8b9367a20d3d2b5e3b). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1011.505052] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b1bb895-29d4-47a4-820f-13bcf1c51bf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.516400] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Created folder: Project (d580b3bcca3e4a8b9367a20d3d2b5e3b) in parent group-v558876. [ 1011.516400] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating folder: Instances. Parent ref: group-v559075. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1011.516400] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-879f8753-490d-4a33-b081-b4d26442eacf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.525724] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Created folder: Instances in parent group-v559075. [ 1011.525993] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1011.526239] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.526587] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8bcf93d-731a-444f-9484-567ca79bf647 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.550963] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.550963] env[68244]: value = "task-2780763" [ 1011.550963] env[68244]: _type = "Task" [ 1011.550963] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.557161] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1011.561501] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.563084] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.412s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.563084] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.564868] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.619s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.564868] env[68244]: DEBUG nova.objects.instance [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lazy-loading 'resources' on Instance uuid 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.572234] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780763, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.590298] env[68244]: INFO nova.scheduler.client.report [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleted allocations for instance d74a0d56-8656-429c-a703-fca87e07798f [ 1011.593036] env[68244]: INFO nova.scheduler.client.report [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Deleted allocations for instance 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598 [ 1011.596996] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.597273] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.597430] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.597611] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.597757] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.597920] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.598149] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 
tempest-ListImageFiltersTestJSON-2040076982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.598322] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.598876] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.598876] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.598876] env[68244]: DEBUG nova.virt.hardware [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.599973] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979e9218-659e-49e5-aee2-8eeede0e5b6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.614305] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cb4244-473c-4a5a-b1c5-296b6e210300 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.713402] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780759, 'name': PowerOffVM_Task, 'duration_secs': 0.222859} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.715850] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1011.716067] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.716825] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b85d0a3-5d19-4056-a272-d19abc91b933 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.747902] env[68244]: DEBUG nova.network.neutron [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Updated VIF entry in instance network info cache for port 5da84ae5-3ae6-4d70-b9c2-8281d992dec1. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.748401] env[68244]: DEBUG nova.network.neutron [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Updating instance_info_cache with network_info: [{"id": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "address": "fa:16:3e:8e:64:4b", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5da84ae5-3a", "ovs_interfaceid": "5da84ae5-3ae6-4d70-b9c2-8281d992dec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.779798] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.780036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 
tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.780274] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Deleting the datastore file [datastore1] c70fb986-8396-4f11-98c4-1ed977a23bcd {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.780599] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3d05487-6c1a-4755-871b-2c6e77d2a78d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.787336] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for the task: (returnval){ [ 1011.787336] env[68244]: value = "task-2780765" [ 1011.787336] env[68244]: _type = "Task" [ 1011.787336] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.796564] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.810362] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780760, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.069800] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780763, 'name': CreateVM_Task, 'duration_secs': 0.39141} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.070199] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.071029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.071263] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.071677] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1012.072011] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dc45349-c377-4063-8723-1591b0f3fc20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.076892] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1012.076892] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52695f85-6210-9463-66c3-5e70d4cac50a" [ 1012.076892] env[68244]: _type = "Task" [ 1012.076892] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.086983] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52695f85-6210-9463-66c3-5e70d4cac50a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.106839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3e17ab80-ae7b-48b5-8fa7-202a6bf939b3 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "d74a0d56-8656-429c-a703-fca87e07798f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.324s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.113525] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d6a80d4d-fd58-4ff8-988f-231b57cf8059 tempest-InstanceActionsNegativeTestJSON-611369663 tempest-InstanceActionsNegativeTestJSON-611369663-project-member] Lock "4fe60c1b-fbfb-4bf0-b52a-7920fa87f598" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.389s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.134133] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Successfully updated port: 8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.210048] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.210048] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afab67f7-d934-4c33-b355-21dd9994be3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.218239] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1012.218239] env[68244]: value = "task-2780766" [ 1012.218239] env[68244]: _type = "Task" [ 1012.218239] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.226224] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.253799] env[68244]: DEBUG oslo_concurrency.lockutils [req-aa3e7f20-cb72-4520-a78a-eb79bac31dcb req-9ec3212b-7370-4a22-b869-1d606eecbb7d service nova] Releasing lock "refresh_cache-b84c2c08-651a-407d-89dd-177bc5d90313" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.297435] env[68244]: DEBUG oslo_vmware.api [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Task: {'id': task-2780765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144214} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.297694] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.297874] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.298068] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.298415] env[68244]: INFO nova.compute.manager [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1012.298541] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.301095] env[68244]: DEBUG nova.compute.manager [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1012.301248] env[68244]: DEBUG nova.network.neutron [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.311726] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780760, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.427922] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab8c388-1f2b-4416-a3be-1d6755ff6cd0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.435955] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b4e7a7-520d-4c75-a3df-58979f70b46c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.466955] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047fd577-351b-4400-9fc8-9357428d0e34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.474327] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73de10a4-9ef4-4957-ba02-cc39980b0dbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.489461] env[68244]: DEBUG nova.compute.provider_tree [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.515617] env[68244]: DEBUG nova.compute.manager [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Received event network-vif-plugged-8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1012.515838] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Acquiring lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.516055] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.516229] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.516472] env[68244]: DEBUG nova.compute.manager [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] No waiting events found dispatching network-vif-plugged-8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.516663] env[68244]: WARNING 
nova.compute.manager [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Received unexpected event network-vif-plugged-8cb9c661-5875-4af2-9420-68539b4270e7 for instance with vm_state building and task_state spawning. [ 1012.516818] env[68244]: DEBUG nova.compute.manager [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Received event network-changed-8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1012.516976] env[68244]: DEBUG nova.compute.manager [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Refreshing instance network info cache due to event network-changed-8cb9c661-5875-4af2-9420-68539b4270e7. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1012.517174] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Acquiring lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.517308] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Acquired lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.517459] env[68244]: DEBUG nova.network.neutron [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Refreshing network info cache for port 8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.587078] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52695f85-6210-9463-66c3-5e70d4cac50a, 'name': SearchDatastore_Task, 'duration_secs': 0.009656} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.587386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.587620] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.587852] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.588061] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.588276] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.588840] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e46236d6-7846-4bb3-8941-c000864c4460 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.597288] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.597468] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.599798] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce9c74f0-fc6f-40df-8532-792419b47e85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.606349] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1012.606349] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d0c63-b390-bcc0-ffcc-bff405a8f9c7" [ 1012.606349] env[68244]: _type = "Task" [ 1012.606349] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.612529] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d0c63-b390-bcc0-ffcc-bff405a8f9c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.640329] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.726909] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780766, 'name': PowerOffVM_Task, 'duration_secs': 0.230722} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.727213] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.727997] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0006467-f089-4676-8e8e-07b53d125c39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.747658] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55eb5f16-ade4-4705-ab04-cff634d324ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.815766] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780760, 'name': ReconfigVM_Task, 'duration_secs': 1.294961} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.815766] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.815766] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b51b2d63-ee78-4a43-b855-06acd80594fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.824164] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1012.824164] env[68244]: value = "task-2780767" [ 1012.824164] env[68244]: _type = "Task" [ 1012.824164] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.830298] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780767, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.995028] env[68244]: DEBUG nova.scheduler.client.report [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.050463] env[68244]: DEBUG nova.network.neutron [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1013.097562] env[68244]: DEBUG nova.network.neutron [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.115852] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520d0c63-b390-bcc0-ffcc-bff405a8f9c7, 'name': SearchDatastore_Task, 'duration_secs': 0.015152} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.116628] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c50f5de9-4d4d-4b25-b5bb-49aaec5e9abb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.122065] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1013.122065] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a11701-47e0-5437-4580-42046163f89c" [ 1013.122065] env[68244]: _type = "Task" [ 1013.122065] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.131739] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a11701-47e0-5437-4580-42046163f89c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.148402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.148651] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.148954] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.149212] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.149414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.151591] env[68244]: INFO nova.compute.manager [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Terminating instance [ 1013.259619] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1013.259935] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9fc6f415-7ec3-4426-9650-5938f7d17184 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.267499] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1013.267499] env[68244]: value = "task-2780768" [ 1013.267499] env[68244]: _type = "Task" [ 1013.267499] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.279931] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780768, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.285020] env[68244]: DEBUG nova.network.neutron [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.336031] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780767, 'name': Rename_Task, 'duration_secs': 0.169888} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.336320] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.336606] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1e9f48e-ede7-4a50-acc7-cfdddd17d007 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.345023] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1013.345023] env[68244]: value = "task-2780769" [ 1013.345023] env[68244]: _type = "Task" [ 1013.345023] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.353478] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.501023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.501401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.721s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.503203] env[68244]: INFO nova.compute.claims [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.527590] env[68244]: INFO nova.scheduler.client.report [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted allocations for instance 45ec526b-e9d8-4ea3-b0c8-af6da39b0158 [ 1013.600729] env[68244]: DEBUG oslo_concurrency.lockutils [req-74c01a47-a4cb-45de-ab56-e6d8eb154b7d req-f7b2ef80-2181-40fe-936c-504d8dcde057 service nova] Releasing lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.601169] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.601350] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.634309] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a11701-47e0-5437-4580-42046163f89c, 'name': SearchDatastore_Task, 'duration_secs': 0.009185} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.634600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.634857] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b84c2c08-651a-407d-89dd-177bc5d90313/b84c2c08-651a-407d-89dd-177bc5d90313.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.635591] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a36c4f95-4a4d-4386-ad94-badfbe3c85c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.642211] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1013.642211] env[68244]: value = "task-2780770" [ 1013.642211] env[68244]: _type = "Task" [ 1013.642211] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.651812] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.657415] env[68244]: DEBUG nova.compute.manager [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1013.657717] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1013.661821] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b82e62c-950e-459e-8b29-e7f8d957a2ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.669412] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1013.669756] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7247ce4e-6aca-43c3-a4b0-3b8495498423 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.678690] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 1013.678690] env[68244]: value = "task-2780771" [ 1013.678690] env[68244]: _type = "Task" [ 1013.678690] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.687808] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.781393] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780768, 'name': CreateSnapshot_Task, 'duration_secs': 0.444309} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.781759] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1013.782596] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b48961c-e076-45ea-85c1-f1de608463dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.785527] env[68244]: INFO nova.compute.manager [-] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Took 1.48 seconds to deallocate network for instance. 
[ 1013.859700] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780769, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.039196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f33d541c-fc0a-4b12-b5e2-1efc7e2834f7 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "45ec526b-e9d8-4ea3-b0c8-af6da39b0158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.498s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.148661] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.157154] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780770, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.197500] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780771, 'name': PowerOffVM_Task, 'duration_secs': 0.163693} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.197834] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.198074] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.198388] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3366af6a-1185-4f6f-9196-c2b4f1e53271 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.283026] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.283215] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.283353] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleting the datastore file [datastore2] 10957648-8618-4f2c-8b08-5468bca20cfc {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.283631] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5215bb3-0d59-477e-aa57-4cdd4a44e85a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.292168] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for the task: (returnval){ [ 1014.292168] env[68244]: value = "task-2780773" [ 1014.292168] env[68244]: _type = "Task" [ 1014.292168] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.300015] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.307306] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1014.309084] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1b368b70-c738-443a-a5e7-2ab8397a481d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.317021] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.321013] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1014.321013] env[68244]: value = "task-2780774" [ 1014.321013] env[68244]: _type = "Task" [ 1014.321013] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.329236] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.329598] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "f579141b-1fac-4541-99c3-07644a0a358c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.329832] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.330060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "f579141b-1fac-4541-99c3-07644a0a358c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.330259] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.330431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.332739] env[68244]: INFO nova.compute.manager [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Terminating instance [ 1014.356043] env[68244]: DEBUG oslo_vmware.api [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780769, 'name': PowerOnVM_Task, 'duration_secs': 0.525445} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.357310] env[68244]: DEBUG nova.network.neutron [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Updating instance_info_cache with network_info: [{"id": "8cb9c661-5875-4af2-9420-68539b4270e7", "address": "fa:16:3e:d5:7e:a5", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cb9c661-58", "ovs_interfaceid": "8cb9c661-5875-4af2-9420-68539b4270e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.358630] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.358630] env[68244]: INFO nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Took 9.92 seconds to spawn the instance on the hypervisor. [ 1014.358841] env[68244]: DEBUG nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.359822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d766964e-f4bc-4b39-9ede-c85544df08b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.660659] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519081} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.661044] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b84c2c08-651a-407d-89dd-177bc5d90313/b84c2c08-651a-407d-89dd-177bc5d90313.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1014.661509] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1014.661626] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37d43174-3f45-45e0-a87d-46cf7eb69ab4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.669349] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1014.669349] env[68244]: value = "task-2780775" [ 1014.669349] env[68244]: _type = "Task" [ 1014.669349] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.683985] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780775, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.802736] env[68244]: DEBUG oslo_vmware.api [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Task: {'id': task-2780773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.436091} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.806078] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.806078] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.806078] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.806078] env[68244]: INFO nova.compute.manager [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1014.806388] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.808183] env[68244]: DEBUG nova.compute.manager [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1014.808183] env[68244]: DEBUG nova.network.neutron [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1014.834134] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.836029] env[68244]: DEBUG nova.compute.manager [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1014.839145] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1014.839145] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c72f01-961b-4b96-826c-ffee55ceea09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.846720] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.846978] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f635671-79dd-4010-af5e-5bb3684a2cdd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.854519] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 1014.854519] env[68244]: value = "task-2780776" [ 1014.854519] env[68244]: _type = "Task" [ 1014.854519] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.865886] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "refresh_cache-f2e57bf9-05ee-49d8-846d-c3bf5920ae96" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.866308] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance network_info: |[{"id": "8cb9c661-5875-4af2-9420-68539b4270e7", "address": "fa:16:3e:d5:7e:a5", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cb9c661-58", "ovs_interfaceid": "8cb9c661-5875-4af2-9420-68539b4270e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1014.871249] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:7e:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cb9c661-5875-4af2-9420-68539b4270e7', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.879502] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.880753] env[68244]: DEBUG nova.compute.manager [req-b64389ac-12d7-4fc4-8f8a-a108673f0dfa req-0b1e506d-d7ea-41c2-92cc-982f60cb01e6 service nova] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Received event network-vif-deleted-2c0febdf-3f54-4d82-8373-cfc91569d784 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1014.885330] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.893990] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.897037] env[68244]: INFO nova.compute.manager [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Took 37.87 seconds to build instance. [ 1014.897037] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-264956a5-1dab-4560-95a0-b4ad36290c40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.926373] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.926373] env[68244]: value = "task-2780777" [ 1014.926373] env[68244]: _type = "Task" [ 1014.926373] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.940084] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780777, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.970637] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7151ecf-039f-468a-8bdf-30e021ebf029 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.980532] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ecd78e-e8c5-41d3-91d1-54c1895d1757 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.014675] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549afa81-1624-4e8e-b603-130fda2abd0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.022393] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b20532-5015-402e-9df1-740a1d1814f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.036465] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.183758] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168954} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.184126] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1015.185082] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378fe6d9-8856-4797-aa2e-6f34a74850c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.218346] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] b84c2c08-651a-407d-89dd-177bc5d90313/b84c2c08-651a-407d-89dd-177bc5d90313.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.218681] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c077013-635b-4170-9e52-472a57867ba2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.239871] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1015.239871] env[68244]: value = "task-2780778" [ 1015.239871] env[68244]: _type = "Task" [ 1015.239871] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.249960] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780778, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.336579] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.364398] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780776, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.418688] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e8a2fac9-dba9-4707-9d26-d869383c369a tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.401s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.438311] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780777, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.559196] env[68244]: ERROR nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [req-c605faa0-308e-412e-a9e4-eac81a41c2f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c605faa0-308e-412e-a9e4-eac81a41c2f5"}]} [ 1015.577395] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1015.591918] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1015.592158] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.603205] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1015.621774] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1015.632361] env[68244]: DEBUG nova.network.neutron [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.752587] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780778, 'name': ReconfigVM_Task, 'duration_secs': 0.303053} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.755242] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Reconfigured VM instance instance-0000004c to attach disk [datastore2] b84c2c08-651a-407d-89dd-177bc5d90313/b84c2c08-651a-407d-89dd-177bc5d90313.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.756248] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42700328-51ad-42a9-9300-4031c7d5e59a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.763425] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1015.763425] env[68244]: value = "task-2780779" [ 1015.763425] env[68244]: _type = "Task" [ 1015.763425] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.779426] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780779, 'name': Rename_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.835496] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.864077] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780776, 'name': PowerOffVM_Task, 'duration_secs': 0.595805} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.866874] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1015.867095] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1015.867550] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9b08044-b82c-4bbb-a0b3-5f8854bd5329 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.935977] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.936336] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.936488] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore1] f579141b-1fac-4541-99c3-07644a0a358c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.943136] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7882050b-79b9-49fc-9343-7f3b6a402388 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.944910] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780777, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.954234] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 1015.954234] env[68244]: value = "task-2780781" [ 1015.954234] env[68244]: _type = "Task" [ 1015.954234] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.965248] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.983432] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60a65fa-a3b4-41de-86a3-b78b28a76d7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.990791] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cca2ae-3980-4f0d-88eb-52f6e99813e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.022059] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9d5b9d-08f4-4144-ac0f-ea991e057c9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.029849] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fccab86-85d0-4708-9135-e7c2c0de1d2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.043271] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.137938] env[68244]: INFO nova.compute.manager [-] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Took 1.33 seconds to deallocate network for instance. [ 1016.273604] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780779, 'name': Rename_Task, 'duration_secs': 0.162398} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.274325] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.274325] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ade38b8-5e30-46b0-bf86-47d938a0aa52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.279668] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1016.279668] env[68244]: value = "task-2780782" [ 1016.279668] env[68244]: _type = "Task" [ 1016.279668] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.289767] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.294174] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.294383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.335676] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.440160] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780777, 'name': CreateVM_Task, 'duration_secs': 1.402522} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.440343] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.441073] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.441297] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.441652] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.441935] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce05ded7-d2f6-46f3-a057-20fc069dfbfe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.447965] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1016.447965] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2fc80-39a5-ccc4-60cc-50f9f7f1a66b" [ 1016.447965] env[68244]: _type = "Task" [ 1016.447965] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.456121] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2fc80-39a5-ccc4-60cc-50f9f7f1a66b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.463779] env[68244]: DEBUG oslo_vmware.api [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295083} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.464050] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1016.464250] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1016.465177] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1016.465177] env[68244]: INFO nova.compute.manager [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1016.465177] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.465177] env[68244]: DEBUG nova.compute.manager [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1016.465177] env[68244]: DEBUG nova.network.neutron [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1016.565635] env[68244]: ERROR nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [req-2aa1e027-8115-475b-b6a1-7e0dd4312ab7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2aa1e027-8115-475b-b6a1-7e0dd4312ab7"}]} [ 1016.583261] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1016.598118] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1016.598398] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.610627] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1016.629734] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1016.645188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.789698] env[68244]: DEBUG oslo_vmware.api 
[None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780782, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.797204] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.838644] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.957787] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d2fc80-39a5-ccc4-60cc-50f9f7f1a66b, 'name': SearchDatastore_Task, 'duration_secs': 0.012715} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.958110] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.958464] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1016.958626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.958774] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.958994] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.962258] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b828bfd5-6b25-45f1-8960-6668009cc5c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.966977] env[68244]: DEBUG nova.compute.manager [req-61c99955-b941-416e-b5f2-afb4a09c0b7e req-b11da933-d422-4e1d-8e88-61a197aed051 service nova] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Received event network-vif-deleted-4bc0d0f1-ef11-425c-987c-514c9b55015f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1016.967200] env[68244]: DEBUG nova.compute.manager [req-61c99955-b941-416e-b5f2-afb4a09c0b7e req-b11da933-d422-4e1d-8e88-61a197aed051 service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Received event network-vif-deleted-34e1c670-8287-43d2-9eac-d13b3e5a5c47 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1016.967351] env[68244]: INFO nova.compute.manager [req-61c99955-b941-416e-b5f2-afb4a09c0b7e req-b11da933-d422-4e1d-8e88-61a197aed051 service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Neutron deleted interface 34e1c670-8287-43d2-9eac-d13b3e5a5c47; detaching it from the instance and deleting it from the info cache [ 1016.967518] env[68244]: DEBUG nova.network.neutron [req-61c99955-b941-416e-b5f2-afb4a09c0b7e req-b11da933-d422-4e1d-8e88-61a197aed051 service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.978891] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.979115] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1016.979872] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e7c1b83-7483-426e-96a4-dc686eb25488 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.989598] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1016.989598] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52847f56-c230-4e9d-96c5-6abd19876d22" [ 1016.989598] env[68244]: _type = "Task" [ 1016.989598] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.998231] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52847f56-c230-4e9d-96c5-6abd19876d22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.049231] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97652221-a803-4fc7-8ded-e38dec151241 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.062901] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50317d9a-1a79-42f6-ac13-2025a0ddfff7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.098619] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63297514-6dd0-4427-80dc-2bd9b5990d00 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.107185] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76938551-4371-4cf0-88b3-cce1975be536 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.122311] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.277774] env[68244]: DEBUG nova.network.neutron [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.293795] env[68244]: DEBUG oslo_vmware.api [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780782, 'name': PowerOnVM_Task, 'duration_secs': 0.56312} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.295143] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.295554] env[68244]: INFO nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1017.295905] env[68244]: DEBUG nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.297559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2980e2-c659-4132-94e0-88235967aa0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.325647] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.336024] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.444725] env[68244]: DEBUG nova.objects.instance [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lazy-loading 'flavor' on Instance uuid 92ce8150-982b-4669-b27a-4afd5c85da86 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.471262] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d1f5366-be0d-43c5-8861-aa536a80d6f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.483672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f926c8-ebeb-4c32-ad37-84888bfb2745 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.504259] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52847f56-c230-4e9d-96c5-6abd19876d22, 'name': SearchDatastore_Task, 'duration_secs': 0.030157} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.505024] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-782693d1-0264-4245-bfb1-e44ebb4f3293 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.519646] env[68244]: DEBUG nova.compute.manager [req-61c99955-b941-416e-b5f2-afb4a09c0b7e req-b11da933-d422-4e1d-8e88-61a197aed051 service nova] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Detach interface failed, port_id=34e1c670-8287-43d2-9eac-d13b3e5a5c47, reason: Instance f579141b-1fac-4541-99c3-07644a0a358c could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1017.521115] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1017.521115] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52032f94-a4bf-e3ad-9b86-27ce50e57a8b" [ 1017.521115] env[68244]: _type = "Task" [ 1017.521115] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.528453] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52032f94-a4bf-e3ad-9b86-27ce50e57a8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.564274] env[68244]: DEBUG nova.compute.manager [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1017.642885] env[68244]: ERROR nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [req-848b33e5-e92f-4874-929b-6bca4ed86062] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-848b33e5-e92f-4874-929b-6bca4ed86062"}]} [ 1017.658392] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1017.670932] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1017.671302] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.682160] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1017.700096] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1017.781388] env[68244]: INFO nova.compute.manager [-] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Took 1.32 seconds to deallocate network for instance. [ 1017.823155] env[68244]: INFO nova.compute.manager [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 37.42 seconds to build instance. 
[ 1017.839911] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780774, 'name': CloneVM_Task, 'duration_secs': 3.144256} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.840828] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Created linked-clone VM from snapshot [ 1017.841608] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74cc4d2-d81b-4fba-8989-4af3b4eedaa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.848995] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Uploading image 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1017.872508] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1017.872508] env[68244]: value = "vm-559079" [ 1017.872508] env[68244]: _type = "VirtualMachine" [ 1017.872508] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1017.872780] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-01fa41b3-b4c4-43ae-aca5-44927c86f175 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.879524] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lease: (returnval){ [ 1017.879524] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52022eff-0618-badd-fc99-a936d108fcd9" [ 1017.879524] env[68244]: _type = "HttpNfcLease" [ 1017.879524] env[68244]: } obtained for exporting VM: (result){ [ 1017.879524] env[68244]: value = "vm-559079" [ 1017.879524] env[68244]: _type = "VirtualMachine" [ 1017.879524] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1017.879781] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the lease: (returnval){ [ 1017.879781] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52022eff-0618-badd-fc99-a936d108fcd9" [ 1017.879781] env[68244]: _type = "HttpNfcLease" [ 1017.879781] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1017.885692] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1017.885692] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52022eff-0618-badd-fc99-a936d108fcd9" [ 1017.885692] env[68244]: _type = "HttpNfcLease" [ 1017.885692] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1017.951839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.951970] env[68244]: DEBUG oslo_concurrency.lockutils [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.030924] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52032f94-a4bf-e3ad-9b86-27ce50e57a8b, 'name': SearchDatastore_Task, 'duration_secs': 0.057364} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.032067] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.032332] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f2e57bf9-05ee-49d8-846d-c3bf5920ae96/f2e57bf9-05ee-49d8-846d-c3bf5920ae96.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.033121] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ae1495-c6cf-4119-99b7-ff6bdadbb6f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.035548] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09e55bea-ba18-4480-8ed4-a3595b376542 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.043754] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e33be47-c0c2-41b5-a44c-e490c1cef04b {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.047410] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1018.047410] env[68244]: value = "task-2780784" [ 1018.047410] env[68244]: _type = "Task" [ 1018.047410] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.080116] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd767d2e-d14c-4e1e-b4eb-de52a90ba9d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.085558] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.088037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.091093] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe33bb8-d0c4-4fab-9d1f-535c6eb3b910 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.104160] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1018.288519] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.326037] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b20f814d-df8f-45be-bbb8-139fe2fa3b15 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.937s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.354564] env[68244]: DEBUG 
nova.network.neutron [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.388974] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1018.388974] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52022eff-0618-badd-fc99-a936d108fcd9" [ 1018.388974] env[68244]: _type = "HttpNfcLease" [ 1018.388974] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1018.388974] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1018.388974] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52022eff-0618-badd-fc99-a936d108fcd9" [ 1018.388974] env[68244]: _type = "HttpNfcLease" [ 1018.388974] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1018.391261] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2b0a71-5b0a-43f3-922d-e219ed872777 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.397042] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1018.397167] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1018.485114] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-026a46b1-9e72-4985-9cd5-183357a8aa27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.561148] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780784, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.647729] env[68244]: DEBUG nova.scheduler.client.report [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 116 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1018.648043] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 116 to 117 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1018.648239] env[68244]: DEBUG nova.compute.provider_tree [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1018.989063] env[68244]: DEBUG nova.compute.manager [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1018.989497] env[68244]: DEBUG nova.compute.manager [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing instance network info cache due to event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1018.989811] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.059438] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781318} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.059826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f2e57bf9-05ee-49d8-846d-c3bf5920ae96/f2e57bf9-05ee-49d8-846d-c3bf5920ae96.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.060137] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.060471] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21055ff8-0844-42cc-9b0c-6f6f3287cceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.068674] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1019.068674] env[68244]: value = "task-2780785" [ 1019.068674] env[68244]: _type = "Task" [ 1019.068674] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.079603] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780785, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.108635] env[68244]: DEBUG nova.network.neutron [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.154360] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.653s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.155229] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1019.158942] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.058s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.161218] env[68244]: INFO nova.compute.claims [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.578777] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069123} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.579089] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.580086] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6c386b-d451-4a2b-a9cd-3b083db07d85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.604117] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] f2e57bf9-05ee-49d8-846d-c3bf5920ae96/f2e57bf9-05ee-49d8-846d-c3bf5920ae96.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.604252] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f061c67-2d4b-463f-960e-255c17aba6ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.619645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.620055] env[68244]: DEBUG nova.compute.manager [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Inject network info {{(pid=68244) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1019.620965] env[68244]: DEBUG nova.compute.manager [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 
tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] network_info to inject: |[{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1019.625835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfiguring VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1019.628082] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.628082] env[68244]: DEBUG nova.network.neutron [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.629242] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1431527-f84a-4fd5-a94f-1ea907042520 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.646327] env[68244]: DEBUG oslo_vmware.api [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 1019.646327] env[68244]: value = "task-2780787" [ 1019.646327] env[68244]: _type = "Task" [ 1019.646327] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.648504] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1019.648504] env[68244]: value = "task-2780786" [ 1019.648504] env[68244]: _type = "Task" [ 1019.648504] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.661989] env[68244]: DEBUG oslo_vmware.api [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780787, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.674608] env[68244]: DEBUG nova.compute.utils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1019.675300] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780786, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.676438] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1019.676701] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1019.736862] env[68244]: DEBUG nova.policy [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dd4fe2dbf154c1791b0bf2e9744629a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a16375181ca41fead00ee23bd2a9af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1019.759253] env[68244]: DEBUG nova.objects.instance [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lazy-loading 'flavor' on Instance uuid 92ce8150-982b-4669-b27a-4afd5c85da86 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.037427] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Successfully created port: a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.163839] env[68244]: DEBUG oslo_vmware.api [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780787, 'name': ReconfigVM_Task, 'duration_secs': 0.197937} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.169651] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-80d86fa4-c0e7-40d8-8b27-a40ca17caa59 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfigured VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1020.172521] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.180111] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1020.269224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.493302] env[68244]: DEBUG nova.network.neutron [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updated VIF entry in instance network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1020.493541] env[68244]: DEBUG nova.network.neutron [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.605277] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844be78e-9e96-4774-a7b5-ea3181fbba2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.613626] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04f5230-3475-4a14-97fa-2fe9fe93786f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.643801] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7d7ace-7f2b-41af-9328-9ef7bbc13a13 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.651059] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d9f49e-5e29-435b-8ee5-678d7a8300c6 {{(pid=68244) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.673417] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780786, 'name': ReconfigVM_Task, 'duration_secs': 0.637846} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.676057] env[68244]: DEBUG nova.compute.provider_tree [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1020.677654] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Reconfigured VM instance instance-0000004d to attach disk [datastore2] f2e57bf9-05ee-49d8-846d-c3bf5920ae96/f2e57bf9-05ee-49d8-846d-c3bf5920ae96.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.678424] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2522a7ad-8656-496f-a3a4-e7df20f197e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.689356] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1020.689356] env[68244]: value = "task-2780788" [ 1020.689356] env[68244]: _type = "Task" [ 1020.689356] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.699075] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780788, 'name': Rename_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.996459] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c9536-c8c9-48b2-b426-68e8f9b0d7ee req-476dc6a2-59ca-4feb-81d2-0c6326026421 service nova] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.996669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.192797] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1021.204824] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780788, 'name': Rename_Task, 'duration_secs': 0.217877} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.204824] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.205775] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4594ac44-a36d-4b65-bf05-910106889d4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.211876] env[68244]: DEBUG nova.scheduler.client.report [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1021.212036] env[68244]: DEBUG nova.compute.provider_tree [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 117 to 118 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1021.212227] env[68244]: DEBUG nova.compute.provider_tree [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 
tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1021.218360] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1021.218360] env[68244]: value = "task-2780789" [ 1021.218360] env[68244]: _type = "Task" [ 1021.218360] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.229650] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780789, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.231691] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.231946] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.232123] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.232306] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.232451] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 
tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.232599] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.232804] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.232960] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.233153] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.233316] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.233494] env[68244]: DEBUG nova.virt.hardware [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.234352] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf08ef82-a1eb-46f6-8b1c-af216931eb66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.244087] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffc32e0-a43a-420e-9322-b12a18755b03 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.331174] env[68244]: DEBUG nova.network.neutron [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1021.349838] env[68244]: DEBUG nova.compute.manager [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1021.350076] env[68244]: DEBUG nova.compute.manager [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing instance network info cache due to event network-changed-a9cddce0-c422-4f46-a41f-feecfe3a6b8e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1021.350295] env[68244]: DEBUG oslo_concurrency.lockutils [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] Acquiring lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.661561] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Successfully updated port: a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.719896] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.720563] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1021.723889] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.118s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.725624] env[68244]: INFO nova.compute.claims [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.738817] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780789, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.066443] env[68244]: DEBUG nova.network.neutron [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.167748] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.167830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.167954] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1022.238830] env[68244]: DEBUG nova.compute.utils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.239803] env[68244]: DEBUG oslo_vmware.api [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780789, 'name': PowerOnVM_Task, 'duration_secs': 0.674034} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.240208] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1022.240385] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.243034] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.243034] env[68244]: INFO nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Took 10.69 seconds to spawn the instance on the hypervisor. [ 1022.243034] env[68244]: DEBUG nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.243970] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a1a008-2d81-4a0f-a5bd-288597d90250 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.293054] env[68244]: DEBUG nova.policy [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1022.569827] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.570200] env[68244]: DEBUG nova.compute.manager [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Inject network info {{(pid=68244) _inject_network_info 
/opt/stack/nova/nova/compute/manager.py:7737}} [ 1022.570474] env[68244]: DEBUG nova.compute.manager [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] network_info to inject: |[{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1022.576724] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfiguring VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1022.576724] env[68244]: DEBUG oslo_concurrency.lockutils [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] Acquired lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.576724] env[68244]: DEBUG nova.network.neutron [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Refreshing network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.577724] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c529c5f6-1feb-45ce-b302-420847c407f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.593317] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Successfully created port: 03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.597713] env[68244]: DEBUG oslo_vmware.api [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 
tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 1022.597713] env[68244]: value = "task-2780790" [ 1022.597713] env[68244]: _type = "Task" [ 1022.597713] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.607211] env[68244]: DEBUG oslo_vmware.api [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780790, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.725937] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.750828] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1022.767763] env[68244]: INFO nova.compute.manager [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Took 42.19 seconds to build instance. 
[ 1022.969976] env[68244]: DEBUG nova.network.neutron [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.118026] env[68244]: DEBUG oslo_vmware.api [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780790, 'name': ReconfigVM_Task, 'duration_secs': 0.156731} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.118026] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5c79d2c6-f121-46ba-baaa-5bcaa3d0e563 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Reconfigured VM instance to set the machine id {{(pid=68244) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1023.221372] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880c741c-a8f4-4b6a-b48f-423a6e7b8f28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.230843] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c980c662-b354-445d-ab1f-7d49a0471bd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.270137] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427edbf9-c230-4749-807c-fe142544fe49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.273143] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e25f9f54-55d5-4eb6-a2e2-a904d29da28a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.706s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.279403] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ed2957-e1d6-4eeb-a35b-4b18edfb9923 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.294724] env[68244]: DEBUG nova.compute.provider_tree [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.375488] env[68244]: DEBUG nova.network.neutron [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updated VIF entry in instance network info cache for port a9cddce0-c422-4f46-a41f-feecfe3a6b8e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.375822] env[68244]: DEBUG nova.network.neutron [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [{"id": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "address": "fa:16:3e:e4:ca:52", "network": {"id": "a9803756-801b-48f4-8521-4db5d47e14ad", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085920282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b9e47e53c1f48e593e8d7161e9e3386", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30c39e9a-a798-4f25-a48c-91f786ba332c", "external-id": "nsx-vlan-transportzone-438", "segmentation_id": 438, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9cddce0-c4", "ovs_interfaceid": "a9cddce0-c422-4f46-a41f-feecfe3a6b8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.383154] env[68244]: DEBUG nova.compute.manager [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Received event network-vif-plugged-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1023.383154] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.383154] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.383267] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.383383] env[68244]: DEBUG nova.compute.manager [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] No waiting events found dispatching 
network-vif-plugged-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1023.383595] env[68244]: WARNING nova.compute.manager [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Received unexpected event network-vif-plugged-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e for instance with vm_state building and task_state spawning. [ 1023.383695] env[68244]: DEBUG nova.compute.manager [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Received event network-changed-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1023.383888] env[68244]: DEBUG nova.compute.manager [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Refreshing instance network info cache due to event network-changed-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1023.383996] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.465158] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "92ce8150-982b-4669-b27a-4afd5c85da86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.467024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.467146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.467343] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.467523] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.470217] env[68244]: INFO nova.compute.manager [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Terminating instance [ 1023.474768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.474768] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Instance network_info: |[{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1023.474975] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.475111] env[68244]: DEBUG nova.network.neutron [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Refreshing network info cache for port a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.478437] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 
a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:1e:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.485329] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating folder: Project (9a16375181ca41fead00ee23bd2a9af0). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.490106] env[68244]: DEBUG nova.compute.manager [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1023.490490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1023.490600] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81bc5ae0-d651-4da5-a3ec-187be7685483 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.493235] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b0e5bf-a0fb-4225-a3ae-c2a90ec26de3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.502346] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.502606] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fae5453a-c743-42db-8d0e-731d2a4ec9e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.505737] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created folder: Project (9a16375181ca41fead00ee23bd2a9af0) in parent group-v558876. [ 1023.506451] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating folder: Instances. Parent ref: group-v559081. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.506451] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73dfa164-2431-429e-9954-77e90a23423b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.512164] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 1023.512164] env[68244]: value = "task-2780792" [ 1023.512164] env[68244]: _type = "Task" [ 1023.512164] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.522044] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.524235] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created folder: Instances in parent group-v559081. [ 1023.524681] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.525048] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.525428] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f77404ec-236e-4069-94e3-62113076bcda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.551301] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.551301] env[68244]: value = "task-2780794" [ 1023.551301] env[68244]: _type = "Task" [ 1023.551301] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.559836] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780794, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.775978] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1023.782378] env[68244]: DEBUG nova.network.neutron [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updated VIF entry in instance network info cache for port a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.782837] env[68244]: DEBUG nova.network.neutron [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.798306] env[68244]: DEBUG nova.scheduler.client.report [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.879463] env[68244]: DEBUG oslo_concurrency.lockutils [req-b70d9bcd-42c4-467e-bc17-4a841748571b req-f63102af-076a-468d-b7fd-84e80d373647 service nova] Releasing lock "refresh_cache-92ce8150-982b-4669-b27a-4afd5c85da86" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.025189] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780792, 'name': PowerOffVM_Task, 'duration_secs': 0.257947} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.025692] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.025987] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.026411] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bb748b0-221a-4f83-87b9-cb7ce3b3bea0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.063395] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780794, 'name': CreateVM_Task, 'duration_secs': 0.456764} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.063395] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.063710] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.063871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.064224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.064457] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68ad2112-0d8e-42f9-b141-d2e32ae92c7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.071522] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1024.071522] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522cc224-3c40-0a26-bf1d-81572a1f2b37" [ 1024.071522] env[68244]: _type = "Task" [ 1024.071522] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.083753] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522cc224-3c40-0a26-bf1d-81572a1f2b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.092029] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.092029] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.092029] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Deleting the datastore file [datastore2] 92ce8150-982b-4669-b27a-4afd5c85da86 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.093766] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.093979] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.094176] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.094364] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 
{{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.094505] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.094647] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.094850] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.095038] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.095222] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.095383] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.095568] env[68244]: DEBUG nova.virt.hardware [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.095821] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a49b0f7-64b3-40b6-b4ce-4ed34a36250e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.098330] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08339856-a499-4fd6-9edd-8cf824cd7364 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.107690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c106d4-98f3-4fe7-a29f-9419de0dd588 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.111878] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for the task: (returnval){ [ 1024.111878] env[68244]: value = "task-2780796" [ 
1024.111878] env[68244]: _type = "Task" [ 1024.111878] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.137431] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.287849] env[68244]: DEBUG oslo_concurrency.lockutils [req-ed6ddd09-cba8-405c-b3b6-2102b78bb340 req-f1b88135-3caa-4dda-a87b-64fe7a661fb5 service nova] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.304141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.304684] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1024.307417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.587s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.307587] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.309770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.578s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.309948] env[68244]: DEBUG nova.objects.instance [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1024.337134] env[68244]: INFO nova.scheduler.client.report [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 
tempest-ServerActionsTestOtherB-65371887-project-member] Deleted allocations for instance e8655168-1fe8-4590-90a3-2ad9438d7761 [ 1024.405985] env[68244]: DEBUG nova.compute.manager [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Received event network-vif-plugged-03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1024.406256] env[68244]: DEBUG oslo_concurrency.lockutils [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] Acquiring lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.406469] env[68244]: DEBUG oslo_concurrency.lockutils [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.406725] env[68244]: DEBUG oslo_concurrency.lockutils [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.406911] env[68244]: DEBUG nova.compute.manager [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] No waiting events found dispatching network-vif-plugged-03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1024.407046] env[68244]: WARNING nova.compute.manager [req-11e4e0f2-929b-4ab5-86cf-eb615eab436d req-073566f2-15d2-47f1-a1f3-dc44cd9b0e77 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Received unexpected event network-vif-plugged-03726444-b143-4a33-aabb-f2e439740b2d for instance with vm_state building and task_state spawning. [ 1024.584674] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522cc224-3c40-0a26-bf1d-81572a1f2b37, 'name': SearchDatastore_Task, 'duration_secs': 0.019584} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.584674] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.585137] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.585137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.585271] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.585491] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.585844] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1c99d04-2281-4aa5-bc0c-9e6010e99fd4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.595780] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.596172] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.596974] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-784aff28-39c9-4135-aa54-b98c5b72a11c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.605799] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1024.605799] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c2bbc-8e3f-2dde-b5ac-8e19def9491f" [ 1024.605799] env[68244]: _type = "Task" [ 1024.605799] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.615911] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c2bbc-8e3f-2dde-b5ac-8e19def9491f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.627021] env[68244]: DEBUG oslo_vmware.api [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Task: {'id': task-2780796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262736} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.627271] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.627450] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.627625] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.627793] env[68244]: INFO nova.compute.manager [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1024.628045] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1024.628240] env[68244]: DEBUG nova.compute.manager [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1024.628332] env[68244]: DEBUG nova.network.neutron [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1024.767981] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Successfully updated port: 03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.815983] env[68244]: DEBUG nova.compute.utils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1024.821220] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1024.821407] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.846032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ddd408d2-bba9-410a-9025-ecaecf3564b4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "e8655168-1fe8-4590-90a3-2ad9438d7761" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.884s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.896867] env[68244]: DEBUG nova.policy [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7907a57725f348b9bff1b8946818928a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0407e8a7e3746699519ee82f5f32909', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.122881] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c2bbc-8e3f-2dde-b5ac-8e19def9491f, 'name': SearchDatastore_Task, 'duration_secs': 0.012268} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.124133] env[68244]: DEBUG nova.compute.manager [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.125659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ee1849-6452-4071-b53f-d31859fef751 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.131209] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ed6776e-49f9-4d7a-aa1d-0a0aa8e27be9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.149046] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1025.149046] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb190e-4052-831b-c477-9c7564617d64" [ 1025.149046] env[68244]: _type = "Task" [ 1025.149046] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.158785] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb190e-4052-831b-c477-9c7564617d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.271190] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.271368] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.271546] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.321977] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1025.329106] env[68244]: DEBUG oslo_concurrency.lockutils [None req-59ab8dd1-7628-4213-b061-fe501574977c tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.330984] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.253s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.331217] env[68244]: DEBUG nova.objects.instance [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lazy-loading 'resources' on Instance uuid 828865d7-d06a-4683-9149-987e6d9efbd9 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.491479] env[68244]: DEBUG nova.compute.manager [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Received event network-changed-03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1025.491664] env[68244]: DEBUG nova.compute.manager [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Refreshing instance network info cache due to event network-changed-03726444-b143-4a33-aabb-f2e439740b2d. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1025.491848] env[68244]: DEBUG oslo_concurrency.lockutils [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] Acquiring lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.649485] env[68244]: INFO nova.compute.manager [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] instance snapshotting [ 1025.657167] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944228c4-ebca-467f-8b48-daeb8711884d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.668105] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cb190e-4052-831b-c477-9c7564617d64, 'name': SearchDatastore_Task, 'duration_secs': 0.0197} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.684465] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.684831] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.686076] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Successfully created port: 435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.689360] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3e8d00e-317d-467a-9838-385199053284 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.691972] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6634043a-a036-46ec-9183-cdc720100d60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.703778] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1025.703778] env[68244]: value = "task-2780797" [ 1025.703778] env[68244]: _type = "Task" [ 1025.703778] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.712475] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.825192] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0eb2de-37d8-4d96-b8c1-e28ae39ad8fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1026.076306] env[68244]: ERROR oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk due to incomplete transfer. [ 1026.076306] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-45591165-fa2a-4b70-a79c-444828cf5e4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.rw_handles [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b3efd-b7b8-62e6-95be-14fe5771716d/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1026.076306] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Uploaded image 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1026.076306] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2ab21235-94f8-4688-ae10-c80ad5b0bcb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1026.076306] env[68244]: value = "task-2780798" [ 1026.076306] env[68244]: _type = "Task" [ 1026.076306] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.076306] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780798, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.087655] env[68244]: DEBUG nova.network.neutron [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Updating instance_info_cache with network_info: [{"id": "03726444-b143-4a33-aabb-f2e439740b2d", "address": "fa:16:3e:1a:3f:f2", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03726444-b1", "ovs_interfaceid": "03726444-b143-4a33-aabb-f2e439740b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.105725] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.106570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.195523] env[68244]: DEBUG nova.network.neutron [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.206298] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1026.206495] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ac20cfbf-5c15-41c0-acaf-0b77a31ccd0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.223523] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780797, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.225778] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1026.225778] env[68244]: value = "task-2780799" [ 1026.225778] env[68244]: _type = "Task" [ 1026.225778] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.239958] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780799, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.340434] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1026.372615] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1026.373220] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.373543] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1026.373886] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.374221] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 
tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1026.378016] env[68244]: DEBUG nova.virt.hardware [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1026.378016] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ce6e75-2940-4589-8cb8-d019e0dd7c7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.383984] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2ce09f-a0da-4341-93c0-237ede32c385 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.394119] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b4e9a0-5dd2-4836-b9bf-a74c01708a3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.399675] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a431a6cd-cb48-432e-9251-b8997b41068b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.438883] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f425aa9b-3691-492c-81cd-8ac1cb82a2bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.447220] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4cf8ea-b7a4-41d1-853c-432ef40e6b61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.459764] env[68244]: DEBUG nova.compute.provider_tree [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.477217] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780798, 'name': Destroy_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.592232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.593030] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Instance network_info: |[{"id": "03726444-b143-4a33-aabb-f2e439740b2d", "address": "fa:16:3e:1a:3f:f2", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03726444-b1", "ovs_interfaceid": "03726444-b143-4a33-aabb-f2e439740b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.593030] env[68244]: DEBUG oslo_concurrency.lockutils [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] Acquired lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.593235] env[68244]: DEBUG nova.network.neutron [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Refreshing network info cache for port 03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.596457] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:3f:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03726444-b143-4a33-aabb-f2e439740b2d', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.602772] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.604356] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.604356] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44bdf371-879a-460e-a2d2-8eb3bff52f7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.619585] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1026.628492] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.628492] env[68244]: value = "task-2780800" [ 1026.628492] env[68244]: _type = "Task" [ 1026.628492] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.638774] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780800, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.701061] env[68244]: INFO nova.compute.manager [-] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Took 2.07 seconds to deallocate network for instance. [ 1026.715972] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780797, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.734760] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780799, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.963781] env[68244]: DEBUG nova.scheduler.client.report [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.978133] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780798, 'name': Destroy_Task, 'duration_secs': 0.733599} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.979047] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Destroyed the VM [ 1026.979313] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1026.979565] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-55baa9ac-f5ff-4489-a044-cadd3b14a045 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.986047] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1026.986047] env[68244]: value = "task-2780801" [ 1026.986047] env[68244]: _type = "Task" [ 1026.986047] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.994328] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780801, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.139038] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780800, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.140039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.213890] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.236509] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780797, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.246802] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780799, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.443622] env[68244]: DEBUG nova.network.neutron [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Updated VIF entry in instance network info cache for port 03726444-b143-4a33-aabb-f2e439740b2d. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.444066] env[68244]: DEBUG nova.network.neutron [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Updating instance_info_cache with network_info: [{"id": "03726444-b143-4a33-aabb-f2e439740b2d", "address": "fa:16:3e:1a:3f:f2", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03726444-b1", "ovs_interfaceid": "03726444-b143-4a33-aabb-f2e439740b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.469435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.471739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.136s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.473235] env[68244]: INFO nova.compute.claims [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.495083] env[68244]: 
INFO nova.scheduler.client.report [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Deleted allocations for instance 828865d7-d06a-4683-9149-987e6d9efbd9 [ 1027.506611] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780801, 'name': RemoveSnapshot_Task, 'duration_secs': 0.366931} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.508275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1027.508275] env[68244]: DEBUG nova.compute.manager [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.512691] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73b757e-1593-46f9-b812-b63116eac01e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.523536] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Successfully updated port: 435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.561711] env[68244]: DEBUG nova.compute.manager [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Received event network-vif-deleted-a9cddce0-c422-4f46-a41f-feecfe3a6b8e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1027.561933] env[68244]: DEBUG nova.compute.manager [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Received event network-vif-plugged-435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1027.562139] env[68244]: DEBUG oslo_concurrency.lockutils [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] Acquiring lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.563021] env[68244]: DEBUG oslo_concurrency.lockutils [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.563021] env[68244]: DEBUG oslo_concurrency.lockutils [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.563021] env[68244]: DEBUG nova.compute.manager [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] No waiting events found dispatching network-vif-plugged-435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.563021] env[68244]: WARNING nova.compute.manager [req-457f2b9a-af31-49aa-b412-82dcfa12a892 req-cc9b5d67-e89a-4bf1-afe5-dfa5bdaacd87 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Received unexpected event network-vif-plugged-435bf34a-456f-410e-89a2-4450cef07161 for instance with vm_state building and task_state spawning. [ 1027.639431] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780800, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.725964] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780797, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.686724} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.726327] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1027.726545] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1027.726789] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b145d87d-696f-4595-a11e-131d7d0107fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.738353] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1027.738353] env[68244]: value = "task-2780802" [ 1027.738353] env[68244]: _type = "Task" [ 1027.738353] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.744826] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780799, 'name': CreateSnapshot_Task, 'duration_secs': 1.202371} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.745417] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1027.746162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be68466-ff8d-4d2c-9d2e-279bb52ac516 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.751209] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780802, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.946648] env[68244]: DEBUG oslo_concurrency.lockutils [req-d148a657-9e56-4203-af29-16a744cd95e5 req-e5ddb67d-193e-4409-88d3-b9c8fb65dffc service nova] Releasing lock "refresh_cache-0c336f72-1cb9-468a-bf59-b0de937e1e94" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.009907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c81213ce-e40d-4f68-b216-3b9e0487384c tempest-ServersTestBootFromVolume-1236961585 tempest-ServersTestBootFromVolume-1236961585-project-member] Lock "828865d7-d06a-4683-9149-987e6d9efbd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.190s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.033027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.033027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquired lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.033027] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.036185] env[68244]: INFO nova.compute.manager [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 
tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Shelve offloading [ 1028.140106] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780800, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.247980] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780802, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.228495} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.248267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.249106] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b1c128-0bb1-448b-a3f6-34c1795c5744 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.269372] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1028.277717] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.277955] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-eaf11d15-3165-4dbf-8eba-eb685627fb99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.280728] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28a5a3e2-ffdd-46c1-9519-35aba88b9420 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.300506] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1028.300506] env[68244]: value = "task-2780803" [ 1028.300506] env[68244]: _type = "Task" [ 1028.300506] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.301425] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1028.301425] env[68244]: value = "task-2780804" [ 1028.301425] env[68244]: _type = "Task" [ 1028.301425] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.312201] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780804, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.314573] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780803, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.544368] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.545359] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c3ee44c-cfb3-4b55-a92d-6225fd6c2054 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.554763] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1028.554763] env[68244]: value = "task-2780805" [ 1028.554763] env[68244]: _type = "Task" [ 1028.554763] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.566910] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1028.567051] env[68244]: DEBUG nova.compute.manager [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.567858] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d7e662-9d7f-421c-bc93-7b122026c057 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.574477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.574477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.574584] env[68244]: DEBUG nova.network.neutron [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.589734] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.645406] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780800, 'name': CreateVM_Task, 'duration_secs': 1.855062} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.645406] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.645406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.645406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.645406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.647714] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c37aa8d-b250-437c-ae69-5402daf98eec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.656793] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1028.656793] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b87d9-e3d4-6325-e55c-02e50eb10bb5" [ 1028.656793] env[68244]: _type = "Task" [ 1028.656793] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.667688] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b87d9-e3d4-6325-e55c-02e50eb10bb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.791044] env[68244]: DEBUG nova.network.neutron [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Updating instance_info_cache with network_info: [{"id": "435bf34a-456f-410e-89a2-4450cef07161", "address": "fa:16:3e:59:52:a1", "network": {"id": "c09ffb8f-e85e-43d6-8935-446edbf41ed6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-577544612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0407e8a7e3746699519ee82f5f32909", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap435bf34a-45", "ovs_interfaceid": "435bf34a-456f-410e-89a2-4450cef07161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.816523] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780803, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.823539] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780804, 'name': ReconfigVM_Task, 'duration_secs': 0.379136} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.823952] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfigured VM instance instance-0000004e to attach disk [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.825992] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-658c3a29-0d97-45a2-9caa-fcfe98cf5345 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.832759] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1028.832759] env[68244]: value = "task-2780806" [ 1028.832759] env[68244]: _type = "Task" [ 1028.832759] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.843202] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780806, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.912249] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8ed296-a195-4830-9300-141c97fc5035 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.923608] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d17ed6-8985-4d75-88a5-b6bbeb3e2ad0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.961819] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead9c946-bfcc-4eaa-92d5-5d686cb5d94e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.970716] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a6b4d2-231f-454c-8735-1f0e979610ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.984732] env[68244]: DEBUG nova.compute.provider_tree [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1029.169405] env[68244]: DEBUG 
oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b87d9-e3d4-6325-e55c-02e50eb10bb5, 'name': SearchDatastore_Task, 'duration_secs': 0.012003} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.169839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.170182] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.170609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.170714] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.171012] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.173949] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99709cb1-c124-4c95-864f-59ce2772796a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.183433] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.183618] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.184553] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d3a5d17-c597-4683-a0b8-3da88b71b466 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.192520] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1029.192520] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523954ec-3d90-36e3-e5e0-edee1d4b81a6" [ 1029.192520] env[68244]: _type = "Task" [ 1029.192520] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.202568] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523954ec-3d90-36e3-e5e0-edee1d4b81a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.295738] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Releasing lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.296112] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Instance network_info: |[{"id": "435bf34a-456f-410e-89a2-4450cef07161", "address": "fa:16:3e:59:52:a1", "network": {"id": "c09ffb8f-e85e-43d6-8935-446edbf41ed6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-577544612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0407e8a7e3746699519ee82f5f32909", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap435bf34a-45", "ovs_interfaceid": "435bf34a-456f-410e-89a2-4450cef07161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.296547] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:52:a1', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '435bf34a-456f-410e-89a2-4450cef07161', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.304315] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Creating folder: Project (a0407e8a7e3746699519ee82f5f32909). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.304626] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7df924d-b91a-4c51-b998-57c38f4c9654 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.317037] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780803, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.317037] env[68244]: DEBUG nova.network.neutron [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.318565] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Created folder: Project (a0407e8a7e3746699519ee82f5f32909) in parent group-v558876. [ 1029.318696] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Creating folder: Instances. Parent ref: group-v559087. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.319076] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f01edb00-ef5a-41d0-bfa5-4c9e8e38d46e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.327285] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Created folder: Instances in parent group-v559087. [ 1029.327514] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.327700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.327898] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-258d475b-4d5c-4a79-8a5e-411dc376fe87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.352733] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780806, 'name': Rename_Task, 'duration_secs': 0.167836} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.353993] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.354219] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.354219] env[68244]: value = "task-2780809" [ 1029.354219] env[68244]: _type = "Task" [ 1029.354219] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.354388] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-583efca7-5a54-4e88-a84e-dff01b79fd9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.363698] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780809, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.365043] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1029.365043] env[68244]: value = "task-2780810" [ 1029.365043] env[68244]: _type = "Task" [ 1029.365043] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.372746] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780810, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.519745] env[68244]: ERROR nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [req-6d724a9d-a9c4-44ff-ad42-a8a5dbe6171b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6d724a9d-a9c4-44ff-ad42-a8a5dbe6171b"}]} [ 1029.538213] env[68244]: DEBUG nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1029.558884] env[68244]: DEBUG nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1029.561149] env[68244]: DEBUG nova.compute.provider_tree [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1029.575111] env[68244]: DEBUG nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, 
aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1029.595798] env[68244]: DEBUG nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1029.603293] env[68244]: DEBUG nova.compute.manager [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Received event network-changed-435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1029.603293] env[68244]: DEBUG nova.compute.manager [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Refreshing instance network info cache due to event network-changed-435bf34a-456f-410e-89a2-4450cef07161. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1029.603293] env[68244]: DEBUG oslo_concurrency.lockutils [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] Acquiring lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.603293] env[68244]: DEBUG oslo_concurrency.lockutils [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] Acquired lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.603293] env[68244]: DEBUG nova.network.neutron [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Refreshing network info cache for port 435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.712242] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523954ec-3d90-36e3-e5e0-edee1d4b81a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010756} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.713153] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e4a54d0-0305-4333-9bd4-7f56b3487536 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.720382] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1029.720382] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b99ace-91a8-c99d-d14b-bc01430ed6cf" [ 1029.720382] env[68244]: _type = "Task" [ 1029.720382] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.731767] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b99ace-91a8-c99d-d14b-bc01430ed6cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.816667] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780803, 'name': CloneVM_Task, 'duration_secs': 1.366808} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.817064] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Created linked-clone VM from snapshot [ 1029.817914] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e1919f-4a5f-47d4-a141-1eca0b715271 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.820858] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.827912] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Uploading image 9b708d7d-4aac-4e76-b504-fa7363e0a73e {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1029.864986] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1029.864986] env[68244]: value = "vm-559086" [ 1029.864986] env[68244]: _type = "VirtualMachine" [ 1029.864986] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1029.865404] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-57c4e14e-0f21-4b7a-aab8-4c8ff8c85e63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.876730] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780809, 'name': CreateVM_Task, 'duration_secs': 0.41918} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.880054] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1029.881117] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.881431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.881746] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1029.886365] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7e38394-140e-4757-8e23-9b1fb7e46384 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.888261] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780810, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.888609] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease: (returnval){ [ 1029.888609] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526468cc-eb57-b0b8-c7a7-abd1d07b9914" [ 1029.888609] env[68244]: _type = "HttpNfcLease" [ 1029.888609] env[68244]: } obtained for exporting VM: (result){ [ 1029.888609] env[68244]: value = "vm-559086" [ 1029.888609] env[68244]: _type = "VirtualMachine" [ 1029.888609] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1029.889308] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the lease: (returnval){ [ 1029.889308] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526468cc-eb57-b0b8-c7a7-abd1d07b9914" [ 1029.889308] env[68244]: _type = "HttpNfcLease" [ 1029.889308] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1029.895756] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1029.895756] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5208e641-9599-a92c-aeec-b18296db019b" [ 1029.895756] env[68244]: _type = "Task" [ 1029.895756] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.904040] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1029.904040] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526468cc-eb57-b0b8-c7a7-abd1d07b9914" [ 1029.904040] env[68244]: _type = "HttpNfcLease" [ 1029.904040] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1029.904809] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1029.904809] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526468cc-eb57-b0b8-c7a7-abd1d07b9914" [ 1029.904809] env[68244]: _type = "HttpNfcLease" [ 1029.904809] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1029.905733] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b78560-210f-4efd-932c-d64a4ddf858d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.911184] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5208e641-9599-a92c-aeec-b18296db019b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.918443] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1029.918596] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk for reading. 
{{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1030.026590] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a4fbf2b-2b61-47ba-bc3f-46d0838bb592 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.073974] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f159570-5af5-4e53-b8e0-3ec4c2cc2ee6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.081185] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a339ac2-8af5-4b4d-84f3-80fef2667b36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.116202] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4d02a4-ac7f-42fd-b384-b7ff9370efbc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.125404] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1ebdfa-e27d-4a49-8979-7ddfe27cdafd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.139754] env[68244]: DEBUG nova.compute.provider_tree [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1030.210021] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.210945] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72902a3-2044-4db6-b623-9023a1f4c31d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.219235] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.219522] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3da4cb0f-b038-49b6-918a-847c55b8a31c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.232687] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 
tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b99ace-91a8-c99d-d14b-bc01430ed6cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011277} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.235605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.235605] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0c336f72-1cb9-468a-bf59-b0de937e1e94/0c336f72-1cb9-468a-bf59-b0de937e1e94.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.236143] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76bd0813-d00b-4dc3-9a04-75aad3ff2170 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.242802] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1030.242802] env[68244]: value = "task-2780813" [ 1030.242802] env[68244]: _type = "Task" [ 1030.242802] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.257052] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780813, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.289215] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.289470] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.289656] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleting the datastore file [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.289919] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1dc966c-7c87-4738-a647-1614db919234 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.296827] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1030.296827] env[68244]: value = "task-2780814" [ 1030.296827] env[68244]: _type = "Task" [ 1030.296827] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.305054] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780814, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.377943] env[68244]: DEBUG oslo_vmware.api [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780810, 'name': PowerOnVM_Task, 'duration_secs': 0.567498} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.378230] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1030.378435] env[68244]: INFO nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Took 9.19 seconds to spawn the instance on the hypervisor. 
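The CreateVM_Task, Rename_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same wait_for_task/_poll_task pattern: a vCenter task is submitted, its progress is polled until it reports completion, and the final entry records duration_secs. The sketch below is only a minimal, generic illustration of that polling loop under assumed behaviour; the Task class, the step values and the interval are hypothetical stand-ins and are not oslo.vmware code.

    # Illustrative sketch of a progress-polling loop in the spirit of the
    # wait_for_task/_poll_task log entries above. Everything here is a
    # hypothetical stand-in, not the oslo.vmware implementation.
    import time


    class Task:
        """Hypothetical task handle that reports progress in percent."""

        def __init__(self, name, steps):
            self.name = name
            self._steps = iter(steps)

        def poll(self):
            # Return the next reported progress value, or 100 when exhausted.
            return next(self._steps, 100)


    def wait_for_task(task, interval=0.5):
        """Poll `task` until it reports 100%, printing lines shaped like the log above."""
        start = time.monotonic()
        while True:
            progress = task.poll()
            if progress >= 100:
                duration = time.monotonic() - start
                print(f"Task {task.name!r} completed successfully in {duration:.3f}s")
                return
            print(f"Task {task.name!r} progress is {progress}%")
            time.sleep(interval)


    if __name__ == "__main__":
        # Example run mirroring a CopyVirtualDisk_Task that reports 0%, 51%, then done.
        wait_for_task(Task("CopyVirtualDisk_Task", [0, 51]), interval=0.1)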
[ 1030.378614] env[68244]: DEBUG nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.379528] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6fd300-fa3c-4880-9b5f-b42ff4bed124 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.394726] env[68244]: DEBUG nova.network.neutron [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Updated VIF entry in instance network info cache for port 435bf34a-456f-410e-89a2-4450cef07161. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1030.395317] env[68244]: DEBUG nova.network.neutron [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Updating instance_info_cache with network_info: [{"id": "435bf34a-456f-410e-89a2-4450cef07161", "address": "fa:16:3e:59:52:a1", "network": {"id": "c09ffb8f-e85e-43d6-8935-446edbf41ed6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-577544612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0407e8a7e3746699519ee82f5f32909", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap435bf34a-45", "ovs_interfaceid": "435bf34a-456f-410e-89a2-4450cef07161", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.412320] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5208e641-9599-a92c-aeec-b18296db019b, 'name': SearchDatastore_Task, 'duration_secs': 0.025699} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.412630] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.412857] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.413100] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.413240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.413428] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.414469] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0df2a5e8-f178-440b-aeee-d107226b2b76 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.424677] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.424935] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.425959] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbf82420-170e-4985-8a50-66aaa3dcc2be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.433806] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1030.433806] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5252e071-c935-82eb-5822-ad06b8ad550a" [ 1030.433806] env[68244]: _type = "Task" [ 1030.433806] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.443769] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5252e071-c935-82eb-5822-ad06b8ad550a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.692291] env[68244]: DEBUG nova.scheduler.client.report [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1030.692688] env[68244]: DEBUG nova.compute.provider_tree [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 119 to 120 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1030.694057] env[68244]: DEBUG nova.compute.provider_tree [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1030.757900] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780813, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.811020] env[68244]: DEBUG oslo_vmware.api [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311287} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.811020] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.811020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.811020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.857006] env[68244]: INFO nova.scheduler.client.report [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted allocations for instance ed5b8ba3-c8f0-468f-85d1-f36179bfef32 [ 1030.898922] env[68244]: DEBUG oslo_concurrency.lockutils [req-84fc6941-7836-4801-aa41-34857656f6f3 req-2d722d74-46e3-4cee-8637-f2b0fa5903a7 service nova] Releasing lock "refresh_cache-246e079b-9fc1-442f-9c20-4e0c05e152e3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.903680] env[68244]: INFO nova.compute.manager [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Took 38.14 seconds to build instance. [ 1030.943019] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5252e071-c935-82eb-5822-ad06b8ad550a, 'name': SearchDatastore_Task, 'duration_secs': 0.018635} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.943924] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ebbea59-9ab6-40dc-87db-795e2c901071 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.949422] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1030.949422] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7d06b-41cc-f5cb-a127-631ac3e00ef5" [ 1030.949422] env[68244]: _type = "Task" [ 1030.949422] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.958582] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7d06b-41cc-f5cb-a127-631ac3e00ef5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.984647] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.984944] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.208195] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.737s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.209060] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.214808] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.300s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.214808] env[68244]: INFO nova.compute.claims [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.257420] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689081} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.257765] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0c336f72-1cb9-468a-bf59-b0de937e1e94/0c336f72-1cb9-468a-bf59-b0de937e1e94.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.258007] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.258278] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-546693e1-a92b-49e8-bf93-7034ab077146 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.267234] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1031.267234] env[68244]: value = "task-2780815" [ 1031.267234] env[68244]: _type = "Task" [ 1031.267234] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.284027] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780815, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.362436] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.406698] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9843b1e3-0744-4d91-b05c-9f69108d2357 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.648s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.463401] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a7d06b-41cc-f5cb-a127-631ac3e00ef5, 'name': SearchDatastore_Task, 'duration_secs': 0.022806} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.463699] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.464113] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 246e079b-9fc1-442f-9c20-4e0c05e152e3/246e079b-9fc1-442f-9c20-4e0c05e152e3.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1031.464508] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b451d54-b794-45c1-889f-fa119129a1b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.471856] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1031.471856] env[68244]: value = "task-2780816" [ 1031.471856] env[68244]: _type = "Task" [ 1031.471856] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.495161] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780816, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.495953] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.495953] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.495953] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.496249] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.496249] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.497483] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.497483] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1031.497483] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.629127] env[68244]: DEBUG nova.compute.manager [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Received event network-changed-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.629953] env[68244]: DEBUG nova.compute.manager [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Refreshing instance network info cache due to event network-changed-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1031.629953] env[68244]: DEBUG oslo_concurrency.lockutils [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.630368] env[68244]: DEBUG oslo_concurrency.lockutils [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.630491] env[68244]: DEBUG nova.network.neutron [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Refreshing network info cache for port a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.653349] env[68244]: DEBUG nova.compute.manager [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-vif-unplugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.656145] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.656145] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.656145] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.656283] env[68244]: DEBUG nova.compute.manager [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] No waiting events found dispatching network-vif-unplugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.656423] env[68244]: WARNING nova.compute.manager [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received unexpected event network-vif-unplugged-abbd3e34-9461-4503-86ee-598fe02a65d3 for instance with vm_state shelved_offloaded and task_state None. 
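The repeated "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" records above come from the driver polling vCenter task handles (the wait_for_task/_poll_task call sites at oslo_vmware/api.py:397 and :434). The snippet below is only a minimal, self-contained sketch of that poll-until-done pattern in plain Python; the FakeTask class, the 0.5 s interval, and the simulated progress values are illustrative assumptions, not the oslo.vmware implementation.

```python
# Minimal sketch of the poll-until-done pattern behind the
# "Waiting for the task ... progress is N%" records above.
# FakeTask and the 0.5 s poll interval are illustrative assumptions,
# not the actual oslo.vmware code.
import time


class FakeTask:
    """Stand-in for a vCenter task handle (hypothetical)."""

    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._steps = steps

    def poll(self):
        # Each poll advances the simulated task; a real driver would
        # read the task's state/progress from vCenter instead.
        self._progress = min(100, self._progress + 100 // self._steps)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task, poll_interval=0.5):
    """Block until the task reports success, printing progress lines
    shaped like the _poll_task records above."""
    start = time.time()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'name': {task.name}}} progress is {progress}%.")
        if state == "success":
            duration = time.time() - start
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("SearchDatastore_Task"))
```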
[ 1031.656729] env[68244]: DEBUG nova.compute.manager [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.657037] env[68244]: DEBUG nova.compute.manager [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing instance network info cache due to event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1031.657364] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.657483] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.657842] env[68244]: DEBUG nova.network.neutron [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.721083] env[68244]: DEBUG nova.compute.utils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1031.726478] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1031.726768] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.778587] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780815, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143148} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.781062] env[68244]: DEBUG nova.policy [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6af77f00c84d4e99bea878bc30dcc361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '821b99c053aa45b4b6b8fb09eb63aa73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.782829] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.784328] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6a8190-1bcc-49fd-a411-470af67efd94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.816387] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 0c336f72-1cb9-468a-bf59-b0de937e1e94/0c336f72-1cb9-468a-bf59-b0de937e1e94.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.817314] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80d1e38b-6b77-4c44-985c-0bc36263b184 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.841263] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1031.841263] env[68244]: value = "task-2780817" [ 1031.841263] env[68244]: _type = "Task" [ 1031.841263] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.851875] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780817, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.985625] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780816, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.999642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.113910] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Successfully created port: 30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.225891] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.352128] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.495180] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601802} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.496549] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 246e079b-9fc1-442f-9c20-4e0c05e152e3/246e079b-9fc1-442f-9c20-4e0c05e152e3.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.497408] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.497812] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ff338c6-7cef-4a81-95df-b3df1498b478 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.508740] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1032.508740] env[68244]: value = "task-2780818" [ 1032.508740] env[68244]: _type = "Task" [ 1032.508740] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.528396] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.643198] env[68244]: DEBUG nova.network.neutron [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updated VIF entry in instance network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.643745] env[68244]: DEBUG nova.network.neutron [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapabbd3e34-94", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.695499] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7fd5de-8f7a-4f03-b11c-15e3b42f7041 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.705781] env[68244]: DEBUG nova.network.neutron [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updated VIF entry in instance network info cache for port a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.706391] env[68244]: DEBUG nova.network.neutron [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.719925] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06eaaff-e42e-418e-88d2-ebf43baf7dc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.785127] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a739bf-e41d-49e3-be12-0a207e21bea3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.793520] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c4c5a9-00cb-4b78-82ad-24418d54feaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.813292] env[68244]: DEBUG nova.compute.provider_tree [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.854281] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "477da9d1-8550-48be-b243-519b4f0ca443" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.854468] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock 
"477da9d1-8550-48be-b243-519b4f0ca443" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.859256] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780817, 'name': ReconfigVM_Task, 'duration_secs': 0.702367} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.859726] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 0c336f72-1cb9-468a-bf59-b0de937e1e94/0c336f72-1cb9-468a-bf59-b0de937e1e94.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.860391] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b457591b-6361-44ca-80e3-9fa8a436e5ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.869499] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1032.869499] env[68244]: value = "task-2780819" [ 1032.869499] env[68244]: _type = "Task" [ 1032.869499] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.879411] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780819, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.019228] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107015} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.019504] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.020346] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13129187-71fa-4a84-8ac3-3655e11e0b53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.042369] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 246e079b-9fc1-442f-9c20-4e0c05e152e3/246e079b-9fc1-442f-9c20-4e0c05e152e3.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.042681] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5413c2b7-889c-457c-80fd-97c4f1af252e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.063093] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1033.063093] env[68244]: value = "task-2780820" [ 1033.063093] env[68244]: _type = "Task" [ 1033.063093] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.071298] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.149252] env[68244]: DEBUG oslo_concurrency.lockutils [req-af666f9a-c4a3-4d11-a1a9-fb859b619437 req-7c714594-f001-4ae3-9a5e-4e666e1ebfb5 service nova] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.211436] env[68244]: DEBUG oslo_concurrency.lockutils [req-3eb54830-4318-4811-8f76-121ec41361ec req-603e7b90-0d99-4173-a325-d5ffe2693ead service nova] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.285760] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.308953] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.309350] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.309610] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.309896] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.310092] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.310310] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.310673] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.310948] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.311219] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Got 1 
possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.311421] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.311731] env[68244]: DEBUG nova.virt.hardware [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.312663] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be42722-e5cf-4f8d-a815-59994fb8a71b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.315969] env[68244]: DEBUG nova.scheduler.client.report [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.324709] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b072aced-5fdb-4987-8b66-2e3c3242c5f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.361041] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1033.380136] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780819, 'name': Rename_Task, 'duration_secs': 0.230319} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.380418] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.380670] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8824cd8e-e660-415f-8a7b-c3a3b46374e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.389350] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1033.389350] env[68244]: value = "task-2780821" [ 1033.389350] env[68244]: _type = "Task" [ 1033.389350] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.402500] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780821, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.416809] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.573248] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.820922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.821465] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1033.824188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.841s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.824408] env[68244]: DEBUG nova.objects.instance [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lazy-loading 'resources' on Instance uuid c73d39d9-1fb7-4ce7-8d60-9243bd6f519f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.886151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.900301] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780821, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.993531] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Successfully updated port: 30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.002915] env[68244]: DEBUG nova.compute.manager [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Received event network-vif-plugged-30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1034.002915] env[68244]: DEBUG oslo_concurrency.lockutils [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] Acquiring lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.002915] env[68244]: DEBUG oslo_concurrency.lockutils [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.003448] env[68244]: DEBUG oslo_concurrency.lockutils [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.003448] 
env[68244]: DEBUG nova.compute.manager [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] No waiting events found dispatching network-vif-plugged-30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.003448] env[68244]: WARNING nova.compute.manager [req-5a203a3f-8259-4429-84fa-5a76ebbe0256 req-db1048d1-6493-4087-9e45-b2d596acb225 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Received unexpected event network-vif-plugged-30943db2-6cce-4a90-9e45-5cf14eb6e799 for instance with vm_state building and task_state spawning. [ 1034.074319] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780820, 'name': ReconfigVM_Task, 'duration_secs': 0.574935} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.074626] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 246e079b-9fc1-442f-9c20-4e0c05e152e3/246e079b-9fc1-442f-9c20-4e0c05e152e3.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.075261] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82f05335-ea64-4340-80fa-295c99f9bf0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.081878] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1034.081878] env[68244]: value = "task-2780822" [ 1034.081878] env[68244]: _type = "Task" [ 1034.081878] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.090136] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780822, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.327767] env[68244]: DEBUG nova.compute.utils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1034.332043] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1034.332226] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1034.373154] env[68244]: DEBUG nova.policy [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f93ab312c1f44d7877c43a7b101cb5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4349b19805a8498392649e1b825d5da7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1034.402758] env[68244]: DEBUG oslo_vmware.api [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780821, 'name': PowerOnVM_Task, 'duration_secs': 0.635802} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.403167] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.403363] env[68244]: INFO nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Took 10.63 seconds to spawn the instance on the hypervisor. 
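Taken together, the task records for instances 0c336f72-1cb9-468a-bf59-b0de937e1e94 and 246e079b-9fc1-442f-9c20-4e0c05e152e3 trace the same spawn sequence: copy the cached image vmdk into the instance folder, extend the root disk, reconfigure the VM to attach that disk, rename the VM, then power it on. The outline below restates that order as plain Python for readability only; every function and path here is a placeholder named after the corresponding log record, not Nova's actual vmops/vm_util API.

```python
# Hypothetical outline of the spawn sequence visible in the task records
# above (CopyVirtualDisk_Task -> ExtendVirtualDisk_Task -> ReconfigVM_Task
# -> Rename_Task -> PowerOnVM_Task). Names are placeholders for
# illustration; they are not Nova's real API.

def spawn_from_image_cache(instance_uuid, image_uuid, datastore="datastore2"):
    cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                  f"{image_uuid}/{image_uuid}.vmdk")
    root_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    steps = [
        # Copy the cached, sparse base image to the instance folder.
        ("CopyVirtualDisk_Task", f"copy {cache_vmdk} -> {root_vmdk}"),
        # Grow the copied disk to the flavor's 1 GiB root
        # (logged above as "Extending root virtual disk to 1048576").
        ("ExtendVirtualDisk_Task", f"extend {root_vmdk}"),
        # Attach the disk to the VM via a reconfigure call.
        ("ReconfigVM_Task", f"attach {root_vmdk} with type sparse"),
        # Rename the VM from its build name to the instance UUID.
        ("Rename_Task", f"rename VM to {instance_uuid}"),
        # Finally power the VM on; the manager then reports build time.
        ("PowerOnVM_Task", "power on the VM"),
    ]
    for task_name, action in steps:
        print(f"{task_name}: {action}")


if __name__ == "__main__":
    spawn_from_image_cache("0c336f72-1cb9-468a-bf59-b0de937e1e94",
                           "9aa0b4d1-af1b-4141-9ca6-95525b722d7e")
```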
[ 1034.403747] env[68244]: DEBUG nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.406341] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e59ac3-30f2-4cea-bb9d-7890f8d696b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.500331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.500331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.500331] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.593489] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780822, 'name': Rename_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.719939] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Successfully created port: 57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1034.737389] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfca813d-6740-41cb-a9f5-8c0c7bdb3ccd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.745765] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69798e7-e8dc-4cfe-8cda-b40cdc2b7b0c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.783555] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd344e5c-4f2c-45c4-a38b-1990543ce04d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.791822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c471c980-60e4-44b7-9480-86915e8f5f33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.805838] env[68244]: DEBUG nova.compute.provider_tree [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.834980] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1034.927818] env[68244]: INFO nova.compute.manager [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Took 38.86 seconds to build instance. [ 1035.033877] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.091936] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780822, 'name': Rename_Task, 'duration_secs': 0.694323} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.092171] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.092421] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75dc86c3-0b23-4088-8f3f-8dea1d92d20f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.098472] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1035.098472] env[68244]: value = "task-2780823" [ 1035.098472] env[68244]: _type = "Task" [ 1035.098472] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.111665] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.170768] env[68244]: DEBUG nova.network.neutron [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Updating instance_info_cache with network_info: [{"id": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "address": "fa:16:3e:f5:e0:f2", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30943db2-6c", "ovs_interfaceid": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.309634] env[68244]: DEBUG nova.scheduler.client.report [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.430585] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9fe6ebe-d9d9-447c-8cd1-c98ad7ce1220 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.372s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.611786] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780823, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.673209] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.673799] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Instance network_info: |[{"id": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "address": "fa:16:3e:f5:e0:f2", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30943db2-6c", "ovs_interfaceid": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1035.674150] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:e0:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30943db2-6cce-4a90-9e45-5cf14eb6e799', 'vif_model': 'vmxnet3'}] {{(pid=68244) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.683746] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.684104] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.684398] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b427e1d4-2d16-43a0-8167-26f9bc3c68fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.707373] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.707373] env[68244]: value = "task-2780824" [ 1035.707373] env[68244]: _type = "Task" [ 1035.707373] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.716167] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780824, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.816247] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.818967] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.699s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.819397] env[68244]: DEBUG nova.objects.instance [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lazy-loading 'resources' on Instance uuid aa5a373e-b34a-4f94-912b-0c7d20fc5b6c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.844416] env[68244]: INFO nova.scheduler.client.report [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted allocations for instance c73d39d9-1fb7-4ce7-8d60-9243bd6f519f [ 1035.846907] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1035.876238] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1035.876523] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.876681] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1035.876860] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.877186] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1035.877404] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1035.877652] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1035.877819] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1035.877989] env[68244]: DEBUG 
nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1035.878190] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1035.878373] env[68244]: DEBUG nova.virt.hardware [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1035.879346] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018a2ff5-3fbb-4031-8cd6-aefaa2712bbd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.888481] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5f6023-f615-4387-95cb-f0aefd50ed01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.030513] env[68244]: DEBUG nova.compute.manager [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Received event network-changed-30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1036.030646] env[68244]: DEBUG nova.compute.manager [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Refreshing instance network info cache due to event network-changed-30943db2-6cce-4a90-9e45-5cf14eb6e799. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1036.030848] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] Acquiring lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.030996] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] Acquired lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.031179] env[68244]: DEBUG nova.network.neutron [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Refreshing network info cache for port 30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.087187] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.087456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.087687] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.087884] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.088077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.090380] env[68244]: INFO nova.compute.manager [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 
0c336f72-1cb9-468a-bf59-b0de937e1e94] Terminating instance [ 1036.110750] env[68244]: DEBUG oslo_vmware.api [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780823, 'name': PowerOnVM_Task, 'duration_secs': 0.676427} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.111273] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.111766] env[68244]: INFO nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1036.112008] env[68244]: DEBUG nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1036.112982] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98042633-69b6-4806-bfe0-e44ea12ea215 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.220425] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780824, 'name': CreateVM_Task, 'duration_secs': 0.473213} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.220617] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.221365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.221525] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.221871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.222156] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98b68d5-9819-425e-9844-546536a52a07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.233022] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1036.233022] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bebcaa-da74-4316-de00-339d79d88335" [ 1036.233022] env[68244]: _type = "Task" [ 1036.233022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.241504] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bebcaa-da74-4316-de00-339d79d88335, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.245186] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Successfully updated port: 57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1036.360358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-579adec5-b1fb-4fee-a608-acf492e8e6d0 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "c73d39d9-1fb7-4ce7-8d60-9243bd6f519f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.786s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.596558] env[68244]: DEBUG nova.compute.manager [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.596840] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.597726] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e0e9cf-2317-4d1f-927b-817d5905ed78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.605918] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.608361] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18e8387c-0576-403b-8eff-c75d1b5b6760 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.616888] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1036.616888] env[68244]: value = "task-2780825" [ 1036.616888] env[68244]: _type = "Task" [ 1036.616888] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.625926] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780825, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.638517] env[68244]: INFO nova.compute.manager [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Took 35.05 seconds to build instance. [ 1036.687335] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539d300c-b3aa-4411-88eb-5fa84dd2c640 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.699029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da621c0-9528-4bc6-9910-0b0ea5aabc4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.738569] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df7c023-f3dc-47b6-8ee5-1e5ec8b16a30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.747685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.747685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.747799] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1036.748959] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bebcaa-da74-4316-de00-339d79d88335, 'name': SearchDatastore_Task, 'duration_secs': 0.019345} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.751504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.751965] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.752112] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.753039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.753039] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.753189] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d25c4acc-d7e6-4296-a6f8-394bd62399f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.756295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2527c6-8e8e-4ab7-818a-4959d57bd1d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.774322] env[68244]: DEBUG nova.compute.provider_tree [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.776422] env[68244]: DEBUG nova.network.neutron [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Updated VIF entry in instance network info cache for port 30943db2-6cce-4a90-9e45-5cf14eb6e799. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.776750] env[68244]: DEBUG nova.network.neutron [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Updating instance_info_cache with network_info: [{"id": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "address": "fa:16:3e:f5:e0:f2", "network": {"id": "27468ef8-a577-4f34-97d6-d2287a51c33f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-464670138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "821b99c053aa45b4b6b8fb09eb63aa73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30943db2-6c", "ovs_interfaceid": "30943db2-6cce-4a90-9e45-5cf14eb6e799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.779196] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.779305] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.780387] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-396e71cc-6ecd-4b55-b568-49e2565e4ba3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.786135] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1036.786135] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285e9ce-d4a7-36d3-56e4-ff6f5ee5b4a6" [ 1036.786135] env[68244]: _type = "Task" [ 1036.786135] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.795782] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285e9ce-d4a7-36d3-56e4-ff6f5ee5b4a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.126767] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780825, 'name': PowerOffVM_Task, 'duration_secs': 0.261375} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.127837] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.127837] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.127837] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf261cab-7cd3-4cdd-845c-1fc1076f8891 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.142832] env[68244]: DEBUG oslo_concurrency.lockutils [None req-51ccbecd-1627-4b3c-9486-d8fba3deed63 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.568s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.190619] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.190959] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.191167] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 0c336f72-1cb9-468a-bf59-b0de937e1e94 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.191494] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c211ee-e4f6-4de8-94ac-fe42bb1f2f46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.199405] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1037.199405] env[68244]: value = "task-2780827" [ 1037.199405] 
env[68244]: _type = "Task" [ 1037.199405] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.210038] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.281086] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e127dd-0adb-4606-9a38-e136bc71ac0e req-8cf6cc92-f844-4c2e-9e8e-da10730cc5d4 service nova] Releasing lock "refresh_cache-fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.282143] env[68244]: DEBUG nova.scheduler.client.report [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.286869] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1037.299232] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285e9ce-d4a7-36d3-56e4-ff6f5ee5b4a6, 'name': SearchDatastore_Task, 'duration_secs': 0.014861} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.300740] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd7297ce-cf97-4ee8-aa9b-cbdaf880cca6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.307441] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1037.307441] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7d3b5-6f75-21a6-0003-bea9de135b64" [ 1037.307441] env[68244]: _type = "Task" [ 1037.307441] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.319469] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7d3b5-6f75-21a6-0003-bea9de135b64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.460611] env[68244]: DEBUG nova.network.neutron [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating instance_info_cache with network_info: [{"id": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "address": "fa:16:3e:14:2c:89", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57ce8ddd-0a", "ovs_interfaceid": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.713642] env[68244]: DEBUG oslo_vmware.api [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257823} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.713951] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.714154] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.718343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.718541] env[68244]: INFO nova.compute.manager [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1037.718838] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.719134] env[68244]: DEBUG nova.compute.manager [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.719293] env[68244]: DEBUG nova.network.neutron [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.790358] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.792624] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.493s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.793111] env[68244]: DEBUG nova.objects.instance [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lazy-loading 'resources' on Instance uuid c70fb986-8396-4f11-98c4-1ed977a23bcd {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.814136] 
env[68244]: INFO nova.scheduler.client.report [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Deleted allocations for instance aa5a373e-b34a-4f94-912b-0c7d20fc5b6c [ 1037.823166] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7d3b5-6f75-21a6-0003-bea9de135b64, 'name': SearchDatastore_Task, 'duration_secs': 0.010599} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.824303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.824588] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f/fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.824875] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-049e40b8-ff08-4293-b498-2ef7b3ee8f2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.833564] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1037.833564] env[68244]: value = "task-2780828" [ 1037.833564] env[68244]: _type = "Task" [ 1037.833564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.844436] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780828, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.964841] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.965054] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Instance network_info: |[{"id": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "address": "fa:16:3e:14:2c:89", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57ce8ddd-0a", "ovs_interfaceid": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1037.965860] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:2c:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57ce8ddd-0a20-4416-bf55-acd66870ad00', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.979732] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating folder: Project (4349b19805a8498392649e1b825d5da7). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1037.980319] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f5a6c3d-85a4-461b-a98b-43f2b86ff0ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.991965] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created folder: Project (4349b19805a8498392649e1b825d5da7) in parent group-v558876. [ 1037.992372] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating folder: Instances. Parent ref: group-v559091. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1037.992760] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85e4b346-b0e8-44c3-a7c8-3b5b0ddd5d25 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.006111] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created folder: Instances in parent group-v559091. [ 1038.006432] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.007288] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.007530] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77d26a80-d4f0-4008-a6e7-a15c28443529 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.032402] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.032402] env[68244]: value = "task-2780831" [ 1038.032402] env[68244]: _type = "Task" [ 1038.032402] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.045054] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780831, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.148241] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Received event network-vif-plugged-57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.148241] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.148241] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.148241] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.148577] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] No waiting events found dispatching network-vif-plugged-57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.148630] env[68244]: WARNING nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Received unexpected event network-vif-plugged-57ce8ddd-0a20-4416-bf55-acd66870ad00 for instance with vm_state building and task_state spawning. [ 1038.148845] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Received event network-changed-57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.149080] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Refreshing instance network info cache due to event network-changed-57ce8ddd-0a20-4416-bf55-acd66870ad00. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1038.149388] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Acquiring lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.149621] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Acquired lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.149845] env[68244]: DEBUG nova.network.neutron [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Refreshing network info cache for port 57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1038.327820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ca2140c-fbc4-4f92-b32b-3aaffc1d2df8 tempest-ServersListShow298Test-1238337987 tempest-ServersListShow298Test-1238337987-project-member] Lock "aa5a373e-b34a-4f94-912b-0c7d20fc5b6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.516s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.347895] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503202} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.348102] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f/fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.348487] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.348624] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f78e956c-99d3-40a6-b034-3b08ebe8e2ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.356143] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1038.356143] env[68244]: value = "task-2780832" [ 1038.356143] env[68244]: _type = "Task" [ 1038.356143] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.365918] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.500282] env[68244]: DEBUG nova.network.neutron [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.544681] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780831, 'name': CreateVM_Task, 'duration_secs': 0.429208} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.544681] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.545226] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.545391] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.545716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1038.545981] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08e0a707-7a41-4925-94f0-699a00456085 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.551013] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1038.551013] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525f8fe7-9936-6ce9-4528-4d6438e88221" [ 1038.551013] env[68244]: _type = "Task" [ 1038.551013] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.562159] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525f8fe7-9936-6ce9-4528-4d6438e88221, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.700114] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99379a1b-370e-4abd-85ce-c923c802e39c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.707530] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1616cb0-4b30-4213-863e-347023288c5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.750343] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee05725-d58b-4d02-b3a9-4a00b0b85934 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.758767] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c9014e-d104-42eb-a6ba-345db174db34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.773616] env[68244]: DEBUG nova.compute.provider_tree [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1038.867062] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092302} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.867062] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.868280] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b48a10-8fe5-4c2c-8d0e-86860e4c6fac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.890725] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f/fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.893484] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ef381f-e288-4cf9-ae3d-3283038c5e80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.913929] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1038.913929] env[68244]: value = "task-2780833" [ 1038.913929] env[68244]: _type = "Task" [ 1038.913929] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.924699] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.944314] env[68244]: DEBUG nova.network.neutron [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updated VIF entry in instance network info cache for port 57ce8ddd-0a20-4416-bf55-acd66870ad00. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1038.944665] env[68244]: DEBUG nova.network.neutron [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating instance_info_cache with network_info: [{"id": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "address": "fa:16:3e:14:2c:89", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57ce8ddd-0a", "ovs_interfaceid": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.005931] env[68244]: INFO nova.compute.manager [-] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Took 1.29 seconds to deallocate network for instance. [ 1039.062055] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525f8fe7-9936-6ce9-4528-4d6438e88221, 'name': SearchDatastore_Task, 'duration_secs': 0.010317} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.062170] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.062394] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.062632] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.062780] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.062960] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.063243] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ae17591-321c-4457-b004-56d1800a4857 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.071719] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.071908] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.072665] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cf90469-426f-4d10-ba6d-955d80121ed5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.077823] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1039.077823] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b6d55-9bd2-fc0a-eba4-99489f975582" [ 1039.077823] env[68244]: _type = "Task" [ 1039.077823] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.087298] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b6d55-9bd2-fc0a-eba4-99489f975582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.149032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.149032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.149032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.149285] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.149430] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.151768] env[68244]: INFO nova.compute.manager [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Terminating instance [ 1039.296232] env[68244]: ERROR nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] [req-2fdec915-c798-42f5-8e06-080a12a97895] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2fdec915-c798-42f5-8e06-080a12a97895"}]} [ 1039.312369] env[68244]: DEBUG nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1039.328593] env[68244]: DEBUG nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1039.328816] env[68244]: DEBUG nova.compute.provider_tree [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1039.340445] env[68244]: DEBUG nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Refreshing aggregate associations for resource provider 
b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1039.359567] env[68244]: DEBUG nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1039.424834] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780833, 'name': ReconfigVM_Task, 'duration_secs': 0.301317} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.425273] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Reconfigured VM instance instance-00000051 to attach disk [datastore1] fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f/fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.428033] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b64dd8c-b716-4261-8315-af82137d68a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.433878] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1039.433878] env[68244]: value = "task-2780834" [ 1039.433878] env[68244]: _type = "Task" [ 1039.433878] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.445346] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780834, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.448025] env[68244]: DEBUG oslo_concurrency.lockutils [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] Releasing lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.448287] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Received event network-vif-deleted-03726444-b143-4a33-aabb-f2e439740b2d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1039.448462] env[68244]: INFO nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Neutron deleted interface 03726444-b143-4a33-aabb-f2e439740b2d; detaching it from the instance and deleting it from the info cache [ 1039.448679] env[68244]: DEBUG nova.network.neutron [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.514890] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.587730] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525b6d55-9bd2-fc0a-eba4-99489f975582, 'name': SearchDatastore_Task, 'duration_secs': 0.008515} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.591258] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c6bc72e-88fb-4b60-b79e-b4fba5d6d967 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.596725] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1039.596725] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e3a675-bcf9-da32-9fd2-aa62e70bb46d" [ 1039.596725] env[68244]: _type = "Task" [ 1039.596725] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.606967] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e3a675-bcf9-da32-9fd2-aa62e70bb46d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.655679] env[68244]: DEBUG nova.compute.manager [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.655914] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.656798] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c827abf5-7e0e-4311-ab5f-af92985f8534 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.667327] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.667327] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26074cc7-5dc6-4d3e-81e2-8bee497ef2e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.672930] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1039.672930] env[68244]: value = "task-2780835" [ 1039.672930] env[68244]: _type = "Task" [ 1039.672930] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.684232] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780835, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.689694] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e34e54-0cab-4390-8ae4-d2e4b820e26b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.697332] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ed4eb6-c844-40ed-8bcf-4f94559472ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.729775] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532397d2-273e-4ed5-8ae5-a9083393b13f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.737372] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a354c44e-0495-47e0-9e43-ec74235e923f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.752838] env[68244]: DEBUG nova.compute.provider_tree [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1039.818842] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1039.819833] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9920588e-5c80-45b2-bda4-d90909e8e23f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.826662] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1039.826830] env[68244]: ERROR oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk due to incomplete transfer. 
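The rw_handles entries just above record the tail end of an image export: the NFC lease for the disk-0.vmdk URL is checked, found still "ready", and then aborted because the transfer did not complete, after which the read handle is closed. Below is a minimal sketch of that decision only; the lease object and its state()/abort()/complete() methods are hypothetical stand-ins, not the real oslo.vmware handle API.

```python
# Hypothetical sketch of "abort the lease on incomplete transfer".
# Only the control flow mirrors the log; the lease object is a stand-in.
import logging

LOG = logging.getLogger(__name__)


def close_read_handle(lease, bytes_read, expected_bytes, url):
    """Release an NFC lease, aborting it if the VMDK read was cut short."""
    state = lease.state()                       # e.g. "ready", "error", "done"
    LOG.debug("Lease for %s is in state: %s", url, state)

    if bytes_read < expected_bytes:
        # Incomplete transfer: abort so vCenter discards the partial export.
        LOG.error("Aborting lease for %s due to incomplete transfer.", url)
        lease.abort()
    else:
        # Everything was read; mark the export as finished cleanly.
        lease.complete()

    LOG.debug("Closed VMDK read handle for %s.", url)
```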
[ 1039.827064] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-85ce4537-4beb-414c-bcbc-6a24b94a592f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.833862] env[68244]: DEBUG oslo_vmware.rw_handles [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280abd8-73a6-6131-f978-1b826fb5c8d8/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1039.834074] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Uploaded image 9b708d7d-4aac-4e76-b504-fa7363e0a73e to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1039.836423] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1039.836666] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-359e058f-6e22-4fca-b65d-1e370d581765 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.842901] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1039.842901] env[68244]: value = "task-2780836" [ 1039.842901] env[68244]: _type = "Task" [ 1039.842901] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.850738] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780836, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.943757] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780834, 'name': Rename_Task, 'duration_secs': 0.13876} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.944141] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.944420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0a60437-939c-4212-a16e-77f6e38c4136 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.950897] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1039.950897] env[68244]: value = "task-2780837" [ 1039.950897] env[68244]: _type = "Task" [ 1039.950897] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.951105] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73d78935-d7d8-4693-8d6c-29b593e1a7e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.960676] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780837, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.964736] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba6d399-af29-4c65-ae08-40836020fd5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.998527] env[68244]: DEBUG nova.compute.manager [req-022c698d-8cc6-44a5-9202-9507e30e244b req-1f612446-6e36-4ea6-b76b-44e164134554 service nova] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Detach interface failed, port_id=03726444-b143-4a33-aabb-f2e439740b2d, reason: Instance 0c336f72-1cb9-468a-bf59-b0de937e1e94 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1040.107919] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e3a675-bcf9-da32-9fd2-aa62e70bb46d, 'name': SearchDatastore_Task, 'duration_secs': 0.008905} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.108216] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.108515] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33/1ba4f3f5-726e-482f-a821-d2ee1bbd4c33.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1040.110038] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfcf9ca8-e987-422c-aec1-694925667607 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.115486] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1040.115486] env[68244]: value = "task-2780838" [ 1040.115486] env[68244]: _type = "Task" [ 1040.115486] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.123585] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780838, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.183363] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780835, 'name': PowerOffVM_Task, 'duration_secs': 0.27138} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.183697] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.183986] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.184338] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09db3112-c969-464a-a8cf-c9e7c72f4121 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.263904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.264214] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.264459] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Deleting the datastore file [datastore2] 246e079b-9fc1-442f-9c20-4e0c05e152e3 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.264780] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-926c3cd1-b151-4657-bf26-cd247b57b424 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.275473] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for the task: (returnval){ [ 1040.275473] env[68244]: value = "task-2780840" [ 1040.275473] env[68244]: _type = "Task" [ 1040.275473] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.283198] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780840, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.286382] env[68244]: DEBUG nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1040.286521] env[68244]: DEBUG nova.compute.provider_tree [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 122 to 123 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1040.286651] env[68244]: DEBUG nova.compute.provider_tree [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1040.353503] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780836, 'name': Destroy_Task, 'duration_secs': 0.487081} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.353833] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Destroyed the VM [ 1040.354099] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1040.354384] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-00e08cee-b4b0-4ec8-a57a-20bc2e882207 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.363211] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1040.363211] env[68244]: value = "task-2780841" [ 1040.363211] env[68244]: _type = "Task" [ 1040.363211] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.372735] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780841, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.465993] env[68244]: DEBUG oslo_vmware.api [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780837, 'name': PowerOnVM_Task, 'duration_secs': 0.511658} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.466318] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1040.466531] env[68244]: INFO nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Took 7.18 seconds to spawn the instance on the hypervisor. 
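The placement exchange above (req-2fdec915-...) is the usual generation-conflict sequence: the inventory PUT for provider b885cb16-... fails with 409 "placement.concurrent_update", the client refreshes inventories, aggregates and traits, and the retried update lands at generation 122 and bumps it to 123. The sketch below shows that retry pattern against a hypothetical placement client with requests-style responses; it is not Nova's SchedulerReportClient, just the shape of the loop the log reflects.

```python
# Minimal sketch (assumed client API) of retrying an inventory update after a
# 409 resource-provider generation conflict, as seen in the log above.
def set_inventory_with_retry(placement, provider_uuid, inventory, max_attempts=3):
    for _ in range(max_attempts):
        # Re-read the provider so the PUT is conditional on its current generation.
        current = placement.get_inventories(provider_uuid)
        generation = current["resource_provider_generation"]

        resp = placement.put_inventories(
            provider_uuid,
            {"resource_provider_generation": generation, "inventories": inventory},
        )
        if resp.status_code == 200:
            return True        # e.g. generation 122 -> 123 in the entries above
        if resp.status_code == 409:
            # Another writer bumped the generation; refresh and try again.
            continue
        resp.raise_for_status()
    return False
```

The conditional-on-generation write is what turns a lost race into a clean retry instead of silently overwriting another process's inventory change.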
[ 1040.466713] env[68244]: DEBUG nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.467798] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5d304d-f82c-4e87-bb8e-0d20dbf3510e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.625385] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780838, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.786650] env[68244]: DEBUG oslo_vmware.api [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Task: {'id': task-2780840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.37215} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.786960] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.787257] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.787514] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.787737] env[68244]: INFO nova.compute.manager [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1040.788318] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.788555] env[68244]: DEBUG nova.compute.manager [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.788619] env[68244]: DEBUG nova.network.neutron [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.793656] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.796140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.151s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.796428] env[68244]: DEBUG nova.objects.instance [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lazy-loading 'resources' on Instance uuid 10957648-8618-4f2c-8b08-5468bca20cfc {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.817673] env[68244]: INFO nova.scheduler.client.report [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Deleted allocations for instance c70fb986-8396-4f11-98c4-1ed977a23bcd [ 1040.876681] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780841, 'name': RemoveSnapshot_Task} progress is 16%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.989085] env[68244]: INFO nova.compute.manager [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Took 33.67 seconds to build instance. 
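The lockutils entries above report how long each caller waited to acquire the "compute_resources" lock and how long it was then held (e.g. waited 24.151s, held 3.001s). The snippet below is only a rough illustration of that waited/held accounting, built on a plain threading.Lock rather than the oslo.concurrency implementation the log comes from.

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name):
        # Generic sketch of the "waited X s / held Y s" bookkeeping; not oslo code.
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - (start + waited)
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')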
[ 1041.077087] env[68244]: DEBUG nova.compute.manager [req-42b145e8-673e-4f4b-b562-382989634046 req-2ad6b277-a5d6-41d7-91fb-66a169d870b6 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Received event network-vif-deleted-435bf34a-456f-410e-89a2-4450cef07161 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1041.077495] env[68244]: INFO nova.compute.manager [req-42b145e8-673e-4f4b-b562-382989634046 req-2ad6b277-a5d6-41d7-91fb-66a169d870b6 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Neutron deleted interface 435bf34a-456f-410e-89a2-4450cef07161; detaching it from the instance and deleting it from the info cache [ 1041.077685] env[68244]: DEBUG nova.network.neutron [req-42b145e8-673e-4f4b-b562-382989634046 req-2ad6b277-a5d6-41d7-91fb-66a169d870b6 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.127543] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528233} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.128038] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33/1ba4f3f5-726e-482f-a821-d2ee1bbd4c33.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1041.128301] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1041.128557] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a74e359a-2652-4542-9c97-e0b4a5305f7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.135826] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1041.135826] env[68244]: value = "task-2780842" [ 1041.135826] env[68244]: _type = "Task" [ 1041.135826] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.144512] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780842, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.330814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cd217792-b5a4-485c-9457-70c787dd15c4 tempest-ServersV294TestFqdnHostnames-842131445 tempest-ServersV294TestFqdnHostnames-842131445-project-member] Lock "c70fb986-8396-4f11-98c4-1ed977a23bcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.654s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.377983] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780841, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.490535] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2976d986-daa7-4d6a-9ad0-b79a778c09b5 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.188s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.542591] env[68244]: DEBUG nova.network.neutron [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.581764] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa79e0fc-0dc9-4ce8-aea1-b7e3e1e4f08d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.590046] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11656896-38b6-4ef4-99f4-e1fac5c9f772 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.625991] env[68244]: DEBUG nova.compute.manager [req-42b145e8-673e-4f4b-b562-382989634046 req-2ad6b277-a5d6-41d7-91fb-66a169d870b6 service nova] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Detach interface failed, port_id=435bf34a-456f-410e-89a2-4450cef07161, reason: Instance 246e079b-9fc1-442f-9c20-4e0c05e152e3 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1041.641597] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e912d6-22c7-4c9f-9336-37e728ec0e27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.648936] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105496} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.651182] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.651996] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374e16b0-509b-48ab-a09d-342dd0cf5266 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.654955] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfd6a2e-04e1-4d45-a651-82bf1aa7cd98 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.677400] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33/1ba4f3f5-726e-482f-a821-d2ee1bbd4c33.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.703832] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b3e704-a82c-4cb3-ad83-fff68769d459 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.719281] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e469c8-560c-4b85-ae27-8d0046da0e9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.728149] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22327631-6755-4ae5-9f8c-5aa6f1569eb4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.732259] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1041.732259] env[68244]: value = "task-2780843" [ 1041.732259] env[68244]: _type = "Task" [ 1041.732259] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.743622] env[68244]: DEBUG nova.compute.provider_tree [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.749968] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780843, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.875478] env[68244]: DEBUG oslo_vmware.api [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780841, 'name': RemoveSnapshot_Task, 'duration_secs': 1.073595} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.875813] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1041.876011] env[68244]: INFO nova.compute.manager [None req-e6fa9d32-8644-4af0-9e2d-1e6ff65214a2 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 16.22 seconds to snapshot the instance on the hypervisor. [ 1042.051263] env[68244]: INFO nova.compute.manager [-] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Took 1.26 seconds to deallocate network for instance. [ 1042.243569] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780843, 'name': ReconfigVM_Task, 'duration_secs': 0.320666} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.244023] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33/1ba4f3f5-726e-482f-a821-d2ee1bbd4c33.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.244815] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88ebd6f8-53c5-4cd3-bea2-d412e6da257e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.247238] env[68244]: DEBUG nova.scheduler.client.report [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.256154] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1042.256154] 
env[68244]: value = "task-2780844" [ 1042.256154] env[68244]: _type = "Task" [ 1042.256154] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.264685] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780844, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.557997] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.578273] env[68244]: DEBUG nova.compute.manager [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.579294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b0476f-1e5c-4eef-abcf-593acd881673 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.756453] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.759514] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.433s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.761034] env[68244]: INFO nova.compute.claims [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.773555] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780844, 'name': Rename_Task, 'duration_secs': 0.148134} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.773828] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.774577] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9997b2c-aac2-41df-bf24-5e02902385e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.780093] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1042.780093] env[68244]: value = "task-2780845" [ 1042.780093] env[68244]: _type = "Task" [ 1042.780093] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.786817] env[68244]: INFO nova.scheduler.client.report [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Deleted allocations for instance 10957648-8618-4f2c-8b08-5468bca20cfc [ 1042.790861] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.092371] env[68244]: INFO nova.compute.manager [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] instance snapshotting [ 1043.095327] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd93bcc7-0358-40b3-a421-8606756174cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.115247] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ee03a5-1206-4498-a812-a6aa3539e614 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.291816] env[68244]: DEBUG oslo_vmware.api [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780845, 'name': PowerOnVM_Task, 'duration_secs': 0.489382} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.292272] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.292423] env[68244]: INFO nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Took 7.45 seconds to spawn the instance on the hypervisor. [ 1043.292537] env[68244]: DEBUG nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.293502] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109649e6-6bb3-4d5c-b38b-ceae9420dd4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.304023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5c064d34-3d87-4489-8075-5aa3657d9672 tempest-MigrationsAdminTest-1911188526 tempest-MigrationsAdminTest-1911188526-project-member] Lock "10957648-8618-4f2c-8b08-5468bca20cfc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.155s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.626296] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1043.626636] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-113d36ab-47f8-4f7f-9a94-0aa692711263 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.634935] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1043.634935] env[68244]: value = "task-2780846" [ 1043.634935] env[68244]: _type = "Task" [ 1043.634935] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.643533] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780846, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.820825] env[68244]: INFO nova.compute.manager [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Took 35.93 seconds to build instance. [ 1044.132330] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8811792-cb2a-41f4-8d0d-ab88dcc8c457 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.148892] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780846, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.148892] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc4594a-1927-4ab4-ad49-e7be253413d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.181329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b784ee9b-e114-4ca5-b270-ade740392a94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.189802] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4626e7ae-d273-410e-babc-231e93827e2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.203243] env[68244]: DEBUG nova.compute.provider_tree [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.323255] env[68244]: DEBUG oslo_concurrency.lockutils [None req-40195e4b-65f2-4694-b524-4a053d3f3208 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.442s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.374188] env[68244]: DEBUG nova.compute.manager [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.375140] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c71aac-4996-4f6b-87b0-8747738ad3e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.462187] env[68244]: DEBUG nova.compute.manager [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Received event 
network-changed-57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1044.462394] env[68244]: DEBUG nova.compute.manager [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Refreshing instance network info cache due to event network-changed-57ce8ddd-0a20-4416-bf55-acd66870ad00. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1044.462630] env[68244]: DEBUG oslo_concurrency.lockutils [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] Acquiring lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.462732] env[68244]: DEBUG oslo_concurrency.lockutils [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] Acquired lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.462884] env[68244]: DEBUG nova.network.neutron [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Refreshing network info cache for port 57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.648135] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780846, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.708933] env[68244]: DEBUG nova.scheduler.client.report [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.887467] env[68244]: INFO nova.compute.manager [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] instance snapshotting [ 1044.890837] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47108c02-de6e-45d4-a758-e0d4ce1857f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.910194] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ffefc3-8ba0-473c-a8c4-7ec3d05a96e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.147348] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780846, 'name': CreateSnapshot_Task, 'duration_secs': 1.067641} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.147668] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1045.148390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56668a9-641e-4363-aa27-7b7861601685 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.213147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.213776] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.217838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 27.130s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.304545] env[68244]: DEBUG nova.network.neutron [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updated VIF entry in instance network info cache for port 57ce8ddd-0a20-4416-bf55-acd66870ad00. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1045.304904] env[68244]: DEBUG nova.network.neutron [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating instance_info_cache with network_info: [{"id": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "address": "fa:16:3e:14:2c:89", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57ce8ddd-0a", "ovs_interfaceid": "57ce8ddd-0a20-4416-bf55-acd66870ad00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.420749] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1045.421159] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-24fc4c7d-4103-40b6-b775-f82167fa8503 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.429405] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1045.429405] env[68244]: value = "task-2780847" [ 1045.429405] env[68244]: _type = "Task" [ 1045.429405] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.437030] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780847, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.670168] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1045.670168] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c655eadc-32f0-4648-9e13-c1032c5a668b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.677270] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1045.677270] env[68244]: value = "task-2780848" [ 1045.677270] env[68244]: _type = "Task" [ 1045.677270] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.687630] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780848, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.719999] env[68244]: DEBUG nova.compute.utils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.721511] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.721724] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.726204] env[68244]: INFO nova.compute.claims [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.813259] env[68244]: DEBUG oslo_concurrency.lockutils [req-af942299-1217-41c3-8885-22bdbc6436b8 req-e58955aa-79c9-49e8-b8c4-571852c23c05 service nova] Releasing lock "refresh_cache-1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.815447] env[68244]: DEBUG nova.policy [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f38e77dbe63b4bcea989e26fa65d4f24', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3896da00d8fc4028a27d7b7c019ff686', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.941272] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780847, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.168758] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Successfully created port: 98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.191214] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780848, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.231513] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1046.238241] env[68244]: INFO nova.compute.resource_tracker [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating resource usage from migration 9d3432d6-d1b5-4178-bedc-2e9fe7a3754c [ 1046.451806] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780847, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.614335] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8b7236-8fba-4393-8b16-d5f098381044 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.627361] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83855cad-7505-4b26-b9c5-58182306d599 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.661580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60a4549-74e4-43ee-bd5c-b23fc52e0ff0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.670385] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49422788-cb5f-43e1-886e-2f6cbf8d2c46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.684834] env[68244]: DEBUG nova.compute.provider_tree [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.695815] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780848, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.945548] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780847, 'name': CreateSnapshot_Task, 'duration_secs': 1.082472} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.945831] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1046.946825] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05f9691-9029-4005-af58-fe233a9f8d68 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.190998] env[68244]: DEBUG nova.scheduler.client.report [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.202582] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780848, 'name': CloneVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.252475] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.298383] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.298875] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.299173] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.301799] env[68244]: DEBUG nova.virt.hardware [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.301799] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a3d913-b9e3-432e-8561-cdbbd3100220 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.312240] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54c334c-1cd0-498f-953e-c0bbb111c1c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.465838] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1047.466330] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-48e4d1e0-5d90-401e-880d-f123276384f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.475352] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1047.475352] env[68244]: value = "task-2780849" [ 1047.475352] env[68244]: _type = "Task" [ 1047.475352] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.484330] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780849, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.606655] env[68244]: DEBUG nova.compute.manager [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Received event network-vif-plugged-98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1047.606655] env[68244]: DEBUG oslo_concurrency.lockutils [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] Acquiring lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.606655] env[68244]: DEBUG oslo_concurrency.lockutils [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.606655] env[68244]: DEBUG oslo_concurrency.lockutils [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.606655] env[68244]: DEBUG nova.compute.manager [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] No waiting events found dispatching network-vif-plugged-98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1047.606655] env[68244]: WARNING nova.compute.manager [req-f100e813-192c-441a-9094-9d9ae6e87df1 req-432f6df3-d402-4792-aacd-b06e961d8e89 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Received unexpected event network-vif-plugged-98fde36b-000a-43da-ac75-ac997cb773c2 for instance with vm_state building and task_state spawning. [ 1047.697129] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.479s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.697420] env[68244]: INFO nova.compute.manager [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Migrating [ 1047.708190] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780848, 'name': CloneVM_Task, 'duration_secs': 1.532418} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.710428] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Successfully updated port: 98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1047.715019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.425s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.715019] env[68244]: DEBUG nova.objects.instance [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lazy-loading 'resources' on Instance uuid f579141b-1fac-4541-99c3-07644a0a358c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.715019] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Created linked-clone VM from snapshot [ 1047.722213] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688022f5-bbb3-4436-8cc6-c0297afffa22 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.735402] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Uploading image 395a64e8-14c2-4cdf-b6d3-b69bfb75d020 {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1047.748997] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1047.749471] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-99d8eae4-7ec1-49ea-8bd5-7bef7ee3d8f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.760017] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1047.760017] env[68244]: value = "task-2780850" [ 1047.760017] env[68244]: _type = "Task" [ 1047.760017] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.771367] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780850, 'name': Destroy_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.987599] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780849, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.148240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.148240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.222354] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.222669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.222722] env[68244]: DEBUG nova.network.neutron [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.226181] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.226181] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquired lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.226181] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] 
[instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.273800] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780850, 'name': Destroy_Task, 'duration_secs': 0.50134} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.274176] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Destroyed the VM [ 1048.274428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1048.274685] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b8de407f-eac3-41f8-af89-3171e5f15145 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.282515] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1048.282515] env[68244]: value = "task-2780851" [ 1048.282515] env[68244]: _type = "Task" [ 1048.282515] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.291552] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780851, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.488770] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780849, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.571047] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc89c0e-f6a4-4933-b876-73b112affec7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.579651] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18349ba-e358-4ba4-a4ad-6a554f660656 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.611313] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7189378d-2ba9-44f0-9159-3651f2a35220 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.620427] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f32636-e8ba-4d5b-b3bc-9e314c567dd8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.639843] env[68244]: DEBUG nova.compute.provider_tree [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.649762] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1048.789992] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.797823] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780851, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.994768] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780849, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.033272] env[68244]: DEBUG nova.network.neutron [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Updating instance_info_cache with network_info: [{"id": "98fde36b-000a-43da-ac75-ac997cb773c2", "address": "fa:16:3e:cb:72:e2", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fde36b-00", "ovs_interfaceid": "98fde36b-000a-43da-ac75-ac997cb773c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.086867] env[68244]: DEBUG nova.network.neutron [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [{"id": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "address": "fa:16:3e:cb:96:5a", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5c56d0b-95", "ovs_interfaceid": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.143180] env[68244]: DEBUG nova.scheduler.client.report [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.174627] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.294569] env[68244]: DEBUG oslo_vmware.api [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780851, 'name': RemoveSnapshot_Task, 'duration_secs': 0.754341} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.294883] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1049.489389] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780849, 'name': CloneVM_Task, 'duration_secs': 1.663568} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.489751] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Created linked-clone VM from snapshot [ 1049.490563] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675c76b6-f4ed-4554-ba35-164e4b3a1f12 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.498463] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Uploading image a40fdbaa-a783-493e-82aa-87bca322893c {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1049.513550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.513823] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.513998] env[68244]: INFO nova.compute.manager [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Shelving [ 1049.525268] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1049.525268] env[68244]: value = "vm-559097" [ 1049.525268] env[68244]: _type = "VirtualMachine" [ 1049.525268] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1049.525871] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e9e490aa-7b6f-4f29-9f97-fd097f2742aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.535793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Releasing lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.536133] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Instance network_info: |[{"id": "98fde36b-000a-43da-ac75-ac997cb773c2", "address": "fa:16:3e:cb:72:e2", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fde36b-00", "ovs_interfaceid": "98fde36b-000a-43da-ac75-ac997cb773c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.536759] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:72:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98fde36b-000a-43da-ac75-ac997cb773c2', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.544282] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Creating folder: Project (3896da00d8fc4028a27d7b7c019ff686). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.544588] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e52bf6d6-6958-42f2-8014-a11a67b90fb5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.549772] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease: (returnval){ [ 1049.549772] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523b3ea8-9857-b1c1-7029-3949b5a9f5de" [ 1049.549772] env[68244]: _type = "HttpNfcLease" [ 1049.549772] env[68244]: } obtained for exporting VM: (result){ [ 1049.549772] env[68244]: value = "vm-559097" [ 1049.549772] env[68244]: _type = "VirtualMachine" [ 1049.549772] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1049.550169] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the lease: (returnval){ [ 1049.550169] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523b3ea8-9857-b1c1-7029-3949b5a9f5de" [ 1049.550169] env[68244]: _type = "HttpNfcLease" [ 1049.550169] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1049.557120] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Created folder: Project (3896da00d8fc4028a27d7b7c019ff686) in parent group-v558876. [ 1049.557386] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Creating folder: Instances. Parent ref: group-v559098. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.558558] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd4de578-20b8-4151-a2d2-721fd53983cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.560074] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1049.560074] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523b3ea8-9857-b1c1-7029-3949b5a9f5de" [ 1049.560074] env[68244]: _type = "HttpNfcLease" [ 1049.560074] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1049.568510] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Created folder: Instances in parent group-v559098. [ 1049.568750] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.568933] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.569152] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-621b792c-8576-496f-b08f-4bfeb9f169eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.590164] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.590164] env[68244]: value = "task-2780855" [ 1049.590164] env[68244]: _type = "Task" [ 1049.590164] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.590745] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.601421] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780855, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.631517] env[68244]: DEBUG nova.compute.manager [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Received event network-changed-98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1049.631708] env[68244]: DEBUG nova.compute.manager [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Refreshing instance network info cache due to event network-changed-98fde36b-000a-43da-ac75-ac997cb773c2. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1049.631920] env[68244]: DEBUG oslo_concurrency.lockutils [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] Acquiring lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.632082] env[68244]: DEBUG oslo_concurrency.lockutils [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] Acquired lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.632246] env[68244]: DEBUG nova.network.neutron [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Refreshing network info cache for port 98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.647016] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.649989] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.510s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.651260] env[68244]: INFO nova.compute.claims [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.672657] env[68244]: INFO nova.scheduler.client.report [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted allocations for instance f579141b-1fac-4541-99c3-07644a0a358c [ 1049.800029] env[68244]: WARNING nova.compute.manager [None req-a8202b75-e432-43f8-9c78-f2b803b2d810 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Image not found during snapshot: nova.exception.ImageNotFound: Image 395a64e8-14c2-4cdf-b6d3-b69bfb75d020 could not be found. 
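Note on the lock records above: the repeated 'Acquiring lock ...', 'Lock ... acquired ... waited N s' and 'Lock ... "released" ... held N s' lines around names such as "compute_resources" and "refresh_cache-<instance uuid>" come from oslo_concurrency.lockutils. A minimal sketch of that pattern, using the public lockutils API only; the function bodies and the work done under the locks are placeholders, not Nova code:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on the named lock, which produces the
    # "acquired ... waited Ns" / "released ... held Ns" DEBUG records seen above.
    @lockutils.synchronized('compute_resources')
    def claim_resources_example():
        pass  # placeholder for the resource-tracker work done under the lock

    # Context-manager form, matching the per-instance "refresh_cache-<uuid>"
    # lock taken before the network info cache is rebuilt.
    def refresh_cache_example(instance_uuid):
        with lockutils.lock('refresh_cache-' + instance_uuid):
            pass  # placeholder: refresh the instance network info here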
[ 1049.878487] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.878719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.878931] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.879126] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.879376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.881509] env[68244]: INFO nova.compute.manager [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Terminating instance [ 1050.059823] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1050.059823] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523b3ea8-9857-b1c1-7029-3949b5a9f5de" [ 1050.059823] env[68244]: _type = "HttpNfcLease" [ 1050.059823] env[68244]: } is ready. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1050.060049] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1050.060049] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523b3ea8-9857-b1c1-7029-3949b5a9f5de" [ 1050.060049] env[68244]: _type = "HttpNfcLease" [ 1050.060049] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1050.060645] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365c8486-c54f-48cc-8596-1efe1213fe33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.068522] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1050.068831] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1050.139823] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780855, 'name': CreateVM_Task, 'duration_secs': 0.36483} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.140052] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1050.140736] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.140899] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.141227] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1050.141487] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8393f805-8772-4250-9b72-fe08d635856b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.147504] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1050.147504] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528c203a-d2ab-d6c4-915c-ffb86a58b3c4" [ 1050.147504] env[68244]: _type = "Task" [ 1050.147504] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.160144] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528c203a-d2ab-d6c4-915c-ffb86a58b3c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.181126] env[68244]: DEBUG oslo_concurrency.lockutils [None req-36daa8d6-d448-4911-bb87-dd048663a12a tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "f579141b-1fac-4541-99c3-07644a0a358c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.851s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.185297] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3aa775f5-934b-4e70-aa4d-2b1ac7011495 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.354446] env[68244]: DEBUG nova.network.neutron [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Updated VIF entry in instance network info cache for port 98fde36b-000a-43da-ac75-ac997cb773c2. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.355493] env[68244]: DEBUG nova.network.neutron [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Updating instance_info_cache with network_info: [{"id": "98fde36b-000a-43da-ac75-ac997cb773c2", "address": "fa:16:3e:cb:72:e2", "network": {"id": "8fa3d118-d4cb-43a8-adc1-dd9b1fd3945b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a5e968a8ae42464696bc8ffe1ee86197", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fde36b-00", "ovs_interfaceid": "98fde36b-000a-43da-ac75-ac997cb773c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.385251] env[68244]: DEBUG nova.compute.manager [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.385489] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.386390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6638c69-7d15-4ca3-acad-01ab9516c10f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.398520] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.398784] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd88031b-8320-47af-943f-7be1e796e6fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.408138] env[68244]: DEBUG oslo_vmware.api [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 1050.408138] env[68244]: value = "task-2780856" [ 1050.408138] env[68244]: _type = "Task" [ 1050.408138] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.417976] env[68244]: DEBUG oslo_vmware.api [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780856, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.527126] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.528250] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35ee07e2-b316-4e74-ba43-c519818c041d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.551504] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1050.551504] env[68244]: value = "task-2780857" [ 1050.551504] env[68244]: _type = "Task" [ 1050.551504] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.561753] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780857, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.660471] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528c203a-d2ab-d6c4-915c-ffb86a58b3c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010936} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.660994] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.661325] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.661686] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.661849] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.662048] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.665495] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccd5c1ca-da8f-4972-a292-766cd9c12608 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.678419] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.678419] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.679205] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-006b5e61-ab46-4f5a-a543-12f4abeb82ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.685864] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1050.685864] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5226f798-4619-d3c3-cedf-b9d64f72c4dc" [ 1050.685864] env[68244]: _type = "Task" [ 1050.685864] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.697431] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5226f798-4619-d3c3-cedf-b9d64f72c4dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.859317] env[68244]: DEBUG oslo_concurrency.lockutils [req-4d8489a9-d0cf-4d50-af87-e071803fef5e req-93a2eb44-74c9-4bc7-8dd8-724ec6183fa6 service nova] Releasing lock "refresh_cache-cd2c4986-2092-4bc5-94c6-222f036c5e83" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.921095] env[68244]: DEBUG oslo_vmware.api [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780856, 'name': PowerOffVM_Task, 'duration_secs': 0.315279} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.924261] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.924567] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.926090] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b5236f9-4632-430a-a57a-ba3214a97529 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.003776] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.004028] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.004210] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleting the datastore file [datastore2] ffa17045-fadf-47d7-9c3b-19d0d54de3fc {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.004798] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e84e29f-18ca-46c1-91b6-0e7ef01fa878 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.018747] env[68244]: DEBUG oslo_vmware.api [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for the task: (returnval){ [ 1051.018747] env[68244]: value = "task-2780859" [ 1051.018747] env[68244]: _type = "Task" [ 1051.018747] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.775142] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.775461] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.775797] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.776007] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.776307] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.780412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.780689] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.780915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.781153] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.781359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.783390] env[68244]: INFO nova.compute.manager [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Terminating instance [ 1051.794161] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af18aefd-1984-4b21-b7f4-4a888ff441ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.800346] env[68244]: DEBUG oslo_vmware.api [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Task: {'id': task-2780859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281673} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.802308] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.803042] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.803042] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.803042] env[68244]: INFO nova.compute.manager [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Took 1.42 seconds to destroy the instance on the hypervisor.
[ 1051.803954] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.805400] env[68244]: DEBUG nova.compute.manager [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.805859] env[68244]: DEBUG nova.network.neutron [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.829316] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.833406] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f78b2b0-1e18-411b-9d97-c2504bf541ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.836200] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780857, 'name': PowerOffVM_Task, 'duration_secs': 0.232218} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.836467] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5226f798-4619-d3c3-cedf-b9d64f72c4dc, 'name': SearchDatastore_Task, 'duration_secs': 0.014298} completed successfully.
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.837852] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.839985] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2351d7-ca14-435f-80ea-05d2b63bdef1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.843025] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c3dbc72-4dce-4935-a543-56d197360a82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.849329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ea9609-335c-4994-8f91-b59368a4c1a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.868474] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1051.868474] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f06dbf-ee89-9bec-9ea7-d261de8d18cb" [ 1051.868474] env[68244]: _type = "Task" [ 1051.868474] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.870224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa22962-cdfe-4517-96d4-63cf3b8f435b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.906581] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a411ad90-bd0f-4409-a480-85a7275ef50a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.921553] env[68244]: INFO nova.compute.manager [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Terminating instance [ 1051.923796] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f06dbf-ee89-9bec-9ea7-d261de8d18cb, 'name': SearchDatastore_Task, 'duration_secs': 0.015564} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.925483] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.926228] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83/cd2c4986-2092-4bc5-94c6-222f036c5e83.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1051.929073] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-333d8014-6c28-4e51-95c0-ab075c95a3ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.932575] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5fdf9f-3abb-4674-813a-bfcdaabdd9bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.951012] env[68244]: DEBUG nova.compute.provider_tree [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.953995] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1051.953995] env[68244]: value = "task-2780860" [ 1051.953995] env[68244]: _type = "Task" [ 1051.953995] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.964104] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.302461] env[68244]: DEBUG nova.compute.manager [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1052.302948] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.304224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f7f4b0-21f3-43c9-b7ea-4010305a13a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.314921] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.319210] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b59c77e-4f80-4cc0-a8da-225c0180a6f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.326808] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1052.326808] env[68244]: value = "task-2780861" [ 1052.326808] env[68244]: _type = "Task" [ 1052.326808] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.339157] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.342483] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.342483] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2eaa8d6-55d3-441f-a391-d5faafb43adc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.349125] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1052.349125] env[68244]: value = "task-2780862" [ 1052.349125] env[68244]: _type = "Task" [ 1052.349125] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.357952] env[68244]: DEBUG nova.compute.manager [req-6545eee4-27e9-4d43-9aff-bfea10cf5a77 req-6f9abd61-d4d2-4b55-87b9-3dcc363a6656 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Received event network-vif-deleted-2a529edd-e384-4bf7-8ab6-a868cc9e2788 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1052.358395] env[68244]: INFO nova.compute.manager [req-6545eee4-27e9-4d43-9aff-bfea10cf5a77 req-6f9abd61-d4d2-4b55-87b9-3dcc363a6656 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Neutron deleted interface 2a529edd-e384-4bf7-8ab6-a868cc9e2788; detaching it from the instance and deleting it from the info cache [ 1052.358722] env[68244]: DEBUG nova.network.neutron [req-6545eee4-27e9-4d43-9aff-bfea10cf5a77 req-6f9abd61-d4d2-4b55-87b9-3dcc363a6656 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.368362] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780862, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.426443] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1052.426652] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-31c7b983-cdf4-4ff1-971f-3f42a2147773 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.430036] env[68244]: DEBUG nova.compute.manager [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1052.430036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1052.431385] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f694f82-b92f-4464-a6e9-2afa82ba791a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.441907] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.443482] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80091a56-f1ec-44a7-9ebf-409c837b2acc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.445720] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1052.445720] env[68244]: value = "task-2780863" [ 1052.445720] env[68244]: _type = "Task" [ 1052.445720] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.456908] env[68244]: DEBUG nova.scheduler.client.report [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.460735] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 1052.460735] env[68244]: value = "task-2780864" [ 1052.460735] env[68244]: _type = "Task" [ 1052.460735] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.472502] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780863, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.488246] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.495014] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780860, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.832894] env[68244]: DEBUG nova.network.neutron [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.840325] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780861, 'name': PowerOffVM_Task, 'duration_secs': 0.390398} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.840925] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.840925] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.841190] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21733f32-b801-4494-bae0-b58960de9d46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.861546] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780862, 'name': PowerOffVM_Task, 'duration_secs': 0.476504} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.863048] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.863048] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1052.867049] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b542392b-42ff-4602-9f8b-dc24be7af953 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.877334] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcda04d-f27f-41dd-9324-1066cb2a37b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.915871] env[68244]: DEBUG nova.compute.manager [req-6545eee4-27e9-4d43-9aff-bfea10cf5a77 req-6f9abd61-d4d2-4b55-87b9-3dcc363a6656 service nova] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Detach interface failed, port_id=2a529edd-e384-4bf7-8ab6-a868cc9e2788, reason: Instance ffa17045-fadf-47d7-9c3b-19d0d54de3fc could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1052.918627] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.918885] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.919188] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleting the datastore file [datastore1] fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.919756] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60df4f14-0b70-4e44-ae66-4221134674f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.928594] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for the task: (returnval){ [ 1052.928594] env[68244]: value = "task-2780866" [ 1052.928594] env[68244]: _type = "Task" [ 1052.928594] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.939539] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.956441] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780863, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.964544] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.315s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.965077] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1052.967959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.755s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.968304] env[68244]: DEBUG nova.objects.instance [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lazy-loading 'resources' on Instance uuid 92ce8150-982b-4669-b27a-4afd5c85da86 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.983272] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.772689} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.984225] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83/cd2c4986-2092-4bc5-94c6-222f036c5e83.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1052.984868] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1052.984868] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13d45aff-26a8-4bb6-81fa-9108093ecefa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.992406] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780864, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.999280] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1052.999280] env[68244]: value = "task-2780867" [ 1052.999280] env[68244]: _type = "Task" [ 1052.999280] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.010432] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.335318] env[68244]: INFO nova.compute.manager [-] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Took 1.53 seconds to deallocate network for instance. 
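The provider inventory reported a few entries above for b885cb16-3bd4-46d8-abd9-28a1bf1058e3 is what Placement sizes this host with: per resource class, the schedulable capacity works out to (total - reserved) * allocation_ratio, with max_unit capping what any single allocation may request. A quick back-of-the-envelope check of the logged numbers (a sketch of that standard formula, not Nova or Placement code):

    # Values copied from the inventory logged above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 175},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: capacity={capacity}, max per allocation={inv['max_unit']}")

    # VCPU: capacity=192, max per allocation=16
    # MEMORY_MB: capacity=196078, max per allocation=65530
    # DISK_GB: capacity=400, max per allocation=175

So the 4.0 VCPU allocation ratio is what lets this 48-thread host report room for 192 vCPUs, while a single instance can still claim at most 16.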
[ 1053.370109] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.370395] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.370563] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.370755] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.370900] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.371062] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.371269] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.371427] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.371623] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.371793] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.371967] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.377642] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f44ae4d7-24a8-436a-845d-70a94b2a32af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.395067] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1053.395067] env[68244]: value = "task-2780868" [ 1053.395067] env[68244]: _type = "Task" [ 1053.395067] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.404582] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.438959] env[68244]: DEBUG oslo_vmware.api [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Task: {'id': task-2780866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.333906} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.439240] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.439460] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.439647] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.439823] env[68244]: INFO nova.compute.manager [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1053.440076] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.440275] env[68244]: DEBUG nova.compute.manager [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1053.440370] env[68244]: DEBUG nova.network.neutron [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.456862] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780863, 'name': CreateSnapshot_Task, 'duration_secs': 0.704934} completed successfully.
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.457094] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1053.457937] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4626bf47-df8a-436a-b8e0-21266806f512 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.472032] env[68244]: DEBUG nova.compute.utils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1053.475463] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1053.475566] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1053.488011] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780864, 'name': PowerOffVM_Task, 'duration_secs': 0.539634} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.488452] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1053.488452] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1053.488720] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b50ef19c-f11a-43da-915a-08ccca414b36 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.512924] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072662} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.513240] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.516412] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f829f5-2c3b-41aa-8a9b-be2013c56890 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.542045] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83/cd2c4986-2092-4bc5-94c6-222f036c5e83.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.543767] env[68244]: DEBUG nova.policy [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '813e863e39a449dd915ef45aa553cdab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '207109eb01bd42b081cc66385789ab80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1053.549311] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce1198a0-e0cd-44cc-8e69-5b5519a5eb17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.572918] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1053.572918] env[68244]: value = "task-2780870" [ 1053.572918] env[68244]: _type = "Task" [ 1053.572918] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.584319] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1053.584609] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1053.584817] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleting the datastore file [datastore2] cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.585126] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1725c4f8-8c9f-46c7-ac79-f70eb1a3749b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.590875] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780870, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.598728] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for the task: (returnval){ [ 1053.598728] env[68244]: value = "task-2780871" [ 1053.598728] env[68244]: _type = "Task" [ 1053.598728] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.610251] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.844204] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.910665] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780868, 'name': ReconfigVM_Task, 'duration_secs': 0.39208} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.912069] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1053.917506] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13db94bd-7cec-4c21-a158-c66cda525691 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.928931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9a12fa-602b-4333-a532-f2734e285d6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.962655] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eecf48f-8330-4cf0-8a63-bff336555675 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.978222] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1053.979287] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1053.981957] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6ab5f4d0-fcbc-4509-b21c-173adcad73dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.986329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61de8df3-8364-4292-9117-d242ad7f8461 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.002612] env[68244]: DEBUG nova.compute.provider_tree [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.006059] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1054.006059] env[68244]: value = "task-2780872" [ 1054.006059] env[68244]: _type = "Task" [ 1054.006059] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.010936] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Successfully created port: 9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1054.019944] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780872, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.085025] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780870, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.111172] env[68244]: DEBUG oslo_vmware.api [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Task: {'id': task-2780871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429249} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.111470] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.111707] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.111914] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.112113] env[68244]: INFO nova.compute.manager [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1054.112375] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.112575] env[68244]: DEBUG nova.compute.manager [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1054.112683] env[68244]: DEBUG nova.network.neutron [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1054.406570] env[68244]: DEBUG nova.network.neutron [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.425878] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1054.426067] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.426238] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1054.426492] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.426621] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1054.426705] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1054.426908] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1054.427098] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1054.427289] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1054.427467] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1054.427646] env[68244]: DEBUG nova.virt.hardware [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1054.433554] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfiguring VM instance instance-0000004b to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1054.434856] env[68244]: DEBUG nova.compute.manager [req-65dd80c0-8cb1-4f0a-8848-5c23140cfe3c req-42853643-400c-4067-99e7-03199d8e9baa service nova] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Received event network-vif-deleted-30943db2-6cce-4a90-9e45-5cf14eb6e799 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1054.436144] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56ad31fa-7739-44a1-8e83-2e7308477ef1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.458034] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1054.458034] env[68244]: value = "task-2780873" [ 1054.458034] env[68244]: _type = "Task" [ 1054.458034] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.468396] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780873, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.508401] env[68244]: DEBUG nova.scheduler.client.report [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.523164] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780872, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.585190] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780870, 'name': ReconfigVM_Task, 'duration_secs': 0.660507} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.585485] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Reconfigured VM instance instance-00000053 to attach disk [datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83/cd2c4986-2092-4bc5-94c6-222f036c5e83.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.586163] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e67ee46a-5412-4e45-8a87-5e6fd6adc70e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.596392] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1054.596392] env[68244]: value = "task-2780874" [ 1054.596392] env[68244]: _type = "Task" [ 1054.596392] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.607309] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780874, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.908914] env[68244]: INFO nova.compute.manager [-] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Took 1.47 seconds to deallocate network for instance. 
[ 1054.969984] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780873, 'name': ReconfigVM_Task, 'duration_secs': 0.22573} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.970313] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfigured VM instance instance-0000004b to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1054.971114] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be796ab3-8657-454e-a568-e48a95c81d7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.996385] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.997721] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1054.999811] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-917f070c-29f5-4869-bd13-f418e66a8922 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.013650] env[68244]: DEBUG nova.network.neutron [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.019229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.021418] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.659s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.021688] env[68244]: DEBUG nova.objects.instance [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'resources' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.025568] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1055.025568] env[68244]: value = "task-2780875" [ 1055.025568] env[68244]: _type = "Task" [ 1055.025568] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.034899] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780872, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.041549] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780875, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.043821] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1055.044114] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.044229] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1055.044413] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.044555] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1055.044698] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1055.044908] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1055.045086] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1055.045262] env[68244]: DEBUG 
nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1055.045421] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1055.045594] env[68244]: DEBUG nova.virt.hardware [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1055.046748] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf2a94b-1a8c-4351-9d9e-0a4df9f5fdc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.052655] env[68244]: INFO nova.scheduler.client.report [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Deleted allocations for instance 92ce8150-982b-4669-b27a-4afd5c85da86 [ 1055.062205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa364951-98e5-4cd8-b5d8-5469b78caa07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.106016] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780874, 'name': Rename_Task, 'duration_secs': 0.222954} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.107067] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.107321] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b6bb714-24bb-4ff5-8f11-6eeeb26849a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.115374] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1055.115374] env[68244]: value = "task-2780876" [ 1055.115374] env[68244]: _type = "Task" [ 1055.115374] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.126047] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.416659] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.519665] env[68244]: INFO nova.compute.manager [-] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Took 1.41 seconds to deallocate network for instance. [ 1055.525292] env[68244]: DEBUG nova.objects.instance [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'numa_topology' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.530087] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780872, 'name': CloneVM_Task, 'duration_secs': 1.253147} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.533667] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Created linked-clone VM from snapshot [ 1055.535064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e76ef27-5330-46aa-bd65-077416b876af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.544664] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780875, 'name': ReconfigVM_Task, 'duration_secs': 0.490242} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.548421] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c/9658b4e0-f4f9-4628-b700-19d94800961c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.548821] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.552562] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Uploading image a370b67b-27dc-4eb4-8e12-ce720e17af0d {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1055.564057] env[68244]: DEBUG oslo_concurrency.lockutils [None req-98c9d79d-1761-49f1-8472-59118a3694b6 tempest-AttachInterfacesUnderV243Test-1245714517 tempest-AttachInterfacesUnderV243Test-1245714517-project-member] Lock "92ce8150-982b-4669-b27a-4afd5c85da86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.098s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.584330] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1055.584330] env[68244]: value = "vm-559102" [ 1055.584330] env[68244]: _type = "VirtualMachine" [ 1055.584330] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1055.584918] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d8ec4e1e-40f6-4f20-a682-d0e2fb0374f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.598277] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease: (returnval){ [ 1055.598277] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52359991-38f7-f181-e51c-431b48a860a0" [ 1055.598277] env[68244]: _type = "HttpNfcLease" [ 1055.598277] env[68244]: } obtained for exporting VM: (result){ [ 1055.598277] env[68244]: value = "vm-559102" [ 1055.598277] env[68244]: _type = "VirtualMachine" [ 1055.598277] env[68244]: }. 
{{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1055.599157] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the lease: (returnval){ [ 1055.599157] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52359991-38f7-f181-e51c-431b48a860a0" [ 1055.599157] env[68244]: _type = "HttpNfcLease" [ 1055.599157] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1055.609873] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1055.609873] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52359991-38f7-f181-e51c-431b48a860a0" [ 1055.609873] env[68244]: _type = "HttpNfcLease" [ 1055.609873] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1055.627193] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780876, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.702120] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Successfully updated port: 9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.028104] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.031888] env[68244]: DEBUG nova.objects.base [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1056.059908] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217389af-925c-4136-bef6-98e2a06c9fa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.085032] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594450fd-4fa4-4270-9b41-74ba437b26f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.106382] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1056.122087] 
env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1056.122087] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52359991-38f7-f181-e51c-431b48a860a0" [ 1056.122087] env[68244]: _type = "HttpNfcLease" [ 1056.122087] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1056.122907] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1056.122907] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52359991-38f7-f181-e51c-431b48a860a0" [ 1056.122907] env[68244]: _type = "HttpNfcLease" [ 1056.122907] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1056.123809] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9fca94-db6e-46a5-98b4-afd59d999be8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.129466] env[68244]: DEBUG oslo_vmware.api [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780876, 'name': PowerOnVM_Task, 'duration_secs': 0.614345} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.132347] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.132558] env[68244]: INFO nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Took 8.88 seconds to spawn the instance on the hypervisor. [ 1056.132769] env[68244]: DEBUG nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.133780] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f438c87-3be4-4a41-8a6d-d142aa1a3d52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.139418] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk from lease info. 
{{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1056.139590] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1056.211281] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.211441] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.211599] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.295210] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3df0a153-2e90-4742-a116-a1a89e822c75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.441320] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e08c8-451e-455b-93be-74732e78c6a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.450062] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd2d797-6f7c-4ca8-ab67-0a98369d76bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.483997] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752ffac5-8015-431c-863a-ce4c8ef32e04 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.488066] env[68244]: DEBUG nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Received event network-vif-deleted-16438b13-f5f7-472e-af75-2da5ea4e4568 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1056.488405] env[68244]: DEBUG nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Received event network-vif-plugged-9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1056.488529] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.488673] env[68244]: DEBUG oslo_concurrency.lockutils [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.488835] env[68244]: DEBUG oslo_concurrency.lockutils [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.488993] env[68244]: DEBUG nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] No waiting events found dispatching network-vif-plugged-9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1056.489172] env[68244]: WARNING nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Received unexpected event network-vif-plugged-9389f00b-7d76-4743-9f6d-d9af08918ce6 for instance with vm_state building and task_state spawning. [ 1056.489334] env[68244]: DEBUG nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Received event network-changed-9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1056.489515] env[68244]: DEBUG nova.compute.manager [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Refreshing instance network info cache due to event network-changed-9389f00b-7d76-4743-9f6d-d9af08918ce6. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1056.489719] env[68244]: DEBUG oslo_concurrency.lockutils [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.497802] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfef9ab-dcb9-4e8d-926d-4677034f9e09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.513029] env[68244]: DEBUG nova.compute.provider_tree [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.688750] env[68244]: DEBUG nova.network.neutron [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Port c5c56d0b-9541-4af9-9b67-3e468da9557f binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1056.725362] env[68244]: INFO nova.compute.manager [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Took 39.42 seconds to build instance. [ 1056.767121] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1057.000092] env[68244]: DEBUG nova.network.neutron [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.018032] env[68244]: DEBUG nova.scheduler.client.report [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.236036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2faec779-23a6-484b-9be0-f7a3945da5a0 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.941s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.506304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.506304] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Instance 
network_info: |[{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1057.506304] env[68244]: DEBUG oslo_concurrency.lockutils [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.506304] env[68244]: DEBUG nova.network.neutron [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Refreshing network info cache for port 9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1057.506304] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:cf:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9389f00b-7d76-4743-9f6d-d9af08918ce6', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1057.522972] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.526031] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1057.526973] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e088ec1e-5a05-4105-96d3-cbc5cf651d65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.546068] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.525s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.549325] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.550s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.549739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.552273] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1057.552273] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.664s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.552273] env[68244]: INFO nova.compute.claims [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.557405] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef52484-a1e6-42bb-aa5b-a192427ffde8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.562967] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1057.562967] env[68244]: value = "task-2780878" [ 1057.562967] env[68244]: _type = "Task" [ 1057.562967] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.571575] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd5145-e048-4118-b923-8c3305645b0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.581669] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780878, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.598979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c17ea1-5ba6-4aa8-ad40-d44aacd34105 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.610767] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cba7e4-626a-426c-a51b-1f46f65f9256 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.648217] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178454MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1057.648553] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.721413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.721942] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.722049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.883136] env[68244]: DEBUG nova.network.neutron [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updated VIF entry in instance network info cache for port 
9389f00b-7d76-4743-9f6d-d9af08918ce6. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.883323] env[68244]: DEBUG nova.network.neutron [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.945960] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.948800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.948800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.948800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1057.948800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.949920] env[68244]: INFO nova.compute.manager [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Terminating instance [ 1058.071172] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8ea179f7-150d-4b4b-987a-0fb49918f659 tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 46.868s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.071172] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 24.652s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.071172] env[68244]: INFO nova.compute.manager [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Unshelving [ 1058.086231] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780878, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.386128] env[68244]: DEBUG oslo_concurrency.lockutils [req-bef65d5e-0a85-435d-878c-2e60e7a56b25 req-46a72ca4-7ece-4a10-8941-3eb7dba930dc service nova] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.458094] env[68244]: DEBUG nova.compute.manager [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Start destroying the instance on the hypervisor. 
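A few entries above, the service request req-bef65d5e refreshes the cached network_info for instance df935885-c313-473d-aa3a-ba81aa999554, replacing the VIF entry for port 9389f00b-7d76-4743-9f6d-d9af08918ce6 and then releasing the corresponding refresh_cache lock. The sketch below shows the replace-or-append-by-port-id idea in isolation; the dict layout is trimmed to the fields needed here and this is not Nova's actual cache code.

# Sketch: refresh one VIF entry in a cached network_info list, keyed by port id.
def update_vif_entry(network_info, new_vif):
    """Replace the entry whose 'id' matches new_vif['id'], or append it."""
    for index, vif in enumerate(network_info):
        if vif.get("id") == new_vif["id"]:
            network_info[index] = new_vif
            return network_info
    network_info.append(new_vif)
    return network_info


if __name__ == "__main__":
    cache = [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "active": False}]
    update_vif_entry(cache, {"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "active": True})
    print(cache)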
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.458392] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.459434] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b43483a-39f5-4a04-8ce2-fb40372f95db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.470387] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.470717] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4431d2a6-0355-4e99-b264-5ece4cda20fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.479922] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1058.479922] env[68244]: value = "task-2780879" [ 1058.479922] env[68244]: _type = "Task" [ 1058.479922] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.489945] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.575629] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780878, 'name': CreateVM_Task, 'duration_secs': 0.570512} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.576013] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1058.576892] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.577363] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.577777] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1058.580564] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64d30180-34c9-4336-ba8a-116d655f14a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.589770] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1058.589770] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eb7650-0505-3574-beee-e256f8de9e64" [ 1058.589770] env[68244]: _type = "Task" [ 1058.589770] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.600214] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eb7650-0505-3574-beee-e256f8de9e64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.766705] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.768405] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.768405] env[68244]: DEBUG nova.network.neutron [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.946448] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabbf29f-bf96-4063-8120-cd0010ec150e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.954919] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0f26d7-846e-4583-88de-8b7d3d0ccdf4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.991172] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdd437f-36ca-48d9-92cd-39f73e56a39c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.005043] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4825ccb1-c8a7-4faa-884e-48895dc939c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.009238] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780879, 'name': PowerOffVM_Task, 'duration_secs': 0.287961} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.009554] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.009721] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.010394] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5d503e0-3f29-48ac-a9c3-a9512cd9bfb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.022130] env[68244]: DEBUG nova.compute.provider_tree [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.098662] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.099052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.099274] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Deleting the datastore file [datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.103695] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9b70cb5-dffa-4724-bc44-44fd05807735 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.105293] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eb7650-0505-3574-beee-e256f8de9e64, 'name': SearchDatastore_Task, 'duration_secs': 0.016137} completed successfully. 
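The terminate path for instance cd2c4986-2092-4bc5-94c6-222f036c5e83 visible above runs as a fixed sequence: power off the VM (PowerOffVM_Task), unregister it from vCenter (UnregisterVM), delete its directory from the datastore (DeleteDatastoreFile_Task), and then, a few entries later, deallocate its networking. The sketch below only strings those steps together in that order; the `vc` and `network` objects and their method names are hypothetical stand-ins, not the real vmwareapi driver calls.

# Sketch of the destroy ordering seen in the log: power off, unregister,
# delete datastore contents, deallocate network. `vc`/`network` are stand-ins.
import logging

LOG = logging.getLogger(__name__)


def destroy_instance(vc, network, instance, datastore_path):
    try:
        vc.power_off(instance)                      # PowerOffVM_Task in the log
    except Exception:
        LOG.warning("Power off failed; continuing with unregister", exc_info=True)
    vc.unregister(instance)                         # VirtualMachine.UnregisterVM
    vc.delete_datastore_files(datastore_path)       # FileManager.DeleteDatastoreFile_Task
    network.deallocate_for_instance(instance)       # Neutron cleanup happens last


if __name__ == "__main__":
    class _Stub:
        def __getattr__(self, name):
            return lambda *a, **kw: print("%s%r" % (name, a))

    destroy_instance(_Stub(), _Stub(), "cd2c4986-2092-4bc5-94c6-222f036c5e83",
                     "[datastore2] cd2c4986-2092-4bc5-94c6-222f036c5e83")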
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.105589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.105904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.106157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.106321] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.106502] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1059.108349] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae412b59-454f-479b-a329-7a98b0f059f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.110698] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.112617] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for the task: (returnval){ [ 1059.112617] env[68244]: value = "task-2780881" [ 1059.112617] env[68244]: _type = "Task" [ 1059.112617] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.120150] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1059.120356] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1059.124095] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd8687d9-9954-4efe-bc97-a0aa46cf249a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.127048] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.133060] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1059.133060] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255fe44-e6e1-9fef-bb67-82e45f1bc576" [ 1059.133060] env[68244]: _type = "Task" [ 1059.133060] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.140302] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255fe44-e6e1-9fef-bb67-82e45f1bc576, 'name': SearchDatastore_Task} progress is 0%. 
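The "[datastore2] devstack-image-cache_base" lock and SearchDatastore_Task sequence above is the image-cache check for image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e: under a lock named after the cached VMDK, the driver searches the datastore, creates the cache directory if needed, and only fetches the image when it is missing. Below is a simplified local-filesystem analogue of that check-then-fetch-under-a-lock pattern; fetch_fn and the path layout are illustrative, not the vmwareapi code path.

# Simplified check-then-fetch under a lock, analogous to the image-cache
# handling above but against a local directory.
import os
import threading

_cache_lock = threading.Lock()


def fetch_image_if_missing(cache_dir, image_id, fetch_fn):
    """Return the cached image path, downloading it first if necessary."""
    path = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    with _cache_lock:                       # serialize concurrent spawns
        if os.path.exists(path):            # SearchDatastore_Task equivalent
            return path
        os.makedirs(os.path.dirname(path), exist_ok=True)   # MakeDirectory
        fetch_fn(path)                      # download from the image service (not shown)
        return path


if __name__ == "__main__":
    import tempfile
    with tempfile.TemporaryDirectory() as d:
        p = fetch_image_if_missing(d, "9aa0b4d1-af1b-4141-9ca6-95525b722d7e",
                                   lambda dst: open(dst, "wb").close())
        print("cached at", p)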
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.525669] env[68244]: DEBUG nova.scheduler.client.report [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.591758] env[68244]: DEBUG nova.network.neutron [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [{"id": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "address": "fa:16:3e:cb:96:5a", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5c56d0b-95", "ovs_interfaceid": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.623928] env[68244]: DEBUG oslo_vmware.api [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Task: {'id': task-2780881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335551} completed successfully. 
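The inventory payload reported above for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 ({'VCPU': {'total': 48, ..., 'allocation_ratio': 4.0}, ...}) is what the resource tracker sends to Placement. Assuming Placement's usual capacity convention of (total - reserved) * allocation_ratio, the logged numbers work out as follows; the snippet below just reproduces that arithmetic for the three resource classes.

# Capacity arithmetic for the inventory dict logged above, assuming
# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print("%s: %.0f schedulable units" % (rc, capacity))
# Expected output: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400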
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.624451] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.624697] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.624890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.625093] env[68244]: INFO nova.compute.manager [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1059.625437] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.625648] env[68244]: DEBUG nova.compute.manager [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.625743] env[68244]: DEBUG nova.network.neutron [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.642844] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255fe44-e6e1-9fef-bb67-82e45f1bc576, 'name': SearchDatastore_Task, 'duration_secs': 0.023743} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.643746] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7793919-bec0-4c80-9b4c-77fa307f7121 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.650842] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1059.650842] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e7f3f7-9000-6359-eb4b-566a015ce7c8" [ 1059.650842] env[68244]: _type = "Task" [ 1059.650842] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.661301] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e7f3f7-9000-6359-eb4b-566a015ce7c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.941555] env[68244]: DEBUG nova.compute.manager [req-909d5af9-29f3-4ef3-9ff9-2bc705b9fd72 req-de6ccbec-0a18-461c-8e10-1f7de94b7388 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Received event network-vif-deleted-98fde36b-000a-43da-ac75-ac997cb773c2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1059.942106] env[68244]: INFO nova.compute.manager [req-909d5af9-29f3-4ef3-9ff9-2bc705b9fd72 req-de6ccbec-0a18-461c-8e10-1f7de94b7388 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Neutron deleted interface 98fde36b-000a-43da-ac75-ac997cb773c2; detaching it from the instance and deleting it from the info cache [ 1059.942759] env[68244]: DEBUG nova.network.neutron [req-909d5af9-29f3-4ef3-9ff9-2bc705b9fd72 req-de6ccbec-0a18-461c-8e10-1f7de94b7388 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.036032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.036032] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1060.037387] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.523s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.037857] env[68244]: DEBUG nova.objects.instance [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 0c336f72-1cb9-468a-bf59-b0de937e1e94 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.095098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.165261] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e7f3f7-9000-6359-eb4b-566a015ce7c8, 'name': SearchDatastore_Task, 'duration_secs': 0.025083} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.165567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.165841] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1060.166147] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8efe989-e119-48fb-a277-e2d5e669f7ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.177043] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1060.177043] env[68244]: value = "task-2780882" [ 1060.177043] env[68244]: _type = "Task" [ 1060.177043] env[68244]: } to complete. 
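Many of the surrounding entries come from oslo.concurrency's lockutils: each 'Lock "compute_resources" acquired ... waited Ns' / '"released" ... held Ms' pair brackets a critical section such as ResourceTracker.instance_claim or update_usage, and the waited/held times show how long callers queued behind each other. The sketch below only mimics that timing-and-logging shape around a plain threading lock; it is not the oslo.concurrency implementation.

# Sketch: time how long a caller waited for a named lock and how long it held
# it, logging in a lockutils-like format. Purely illustrative.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, caller):
    lock = _get_lock(name)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)   # stand-in for claiming resources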
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.189284] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.362599] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1060.363763] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc193d3a-df41-40e0-adac-602241f828b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.369749] env[68244]: DEBUG nova.network.neutron [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.373239] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1060.373565] env[68244]: ERROR oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk due to incomplete transfer. [ 1060.373964] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3344b7cc-c423-48ed-aed9-8e0aa6bddb7a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.384659] env[68244]: DEBUG oslo_vmware.rw_handles [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fa0fae-c73d-181d-2e30-fe18f6f744e4/disk-0.vmdk. 
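The rw_handles entries above show the tail end of a streamOptimized disk export: the HTTP NFC lease for disk-0.vmdk is checked, found ready, and then aborted via HttpNfcLease.HttpNfcLeaseAbort because the transfer was incomplete, before the read handle is closed. The sketch below captures that "complete or abort the lease" decision; `invoke` is a hypothetical session helper, and only the method names it is given mirror the vSphere calls seen in the log.

# Sketch: finish an HTTP NFC lease depending on whether the transfer finished.
# `invoke(method, lease)` is a hypothetical helper, not a real client API.
def finish_lease(invoke, lease, transferred_bytes, expected_bytes):
    state = invoke("HttpNfcLeaseState", lease)     # e.g. "ready" / "error"
    if state != "ready":
        return
    if transferred_bytes >= expected_bytes:
        invoke("HttpNfcLeaseComplete", lease)      # normal completion
    else:
        # Incomplete transfer: abort so vCenter releases the export, as the
        # "Aborting lease ... due to incomplete transfer" line above does.
        invoke("HttpNfcLeaseAbort", lease)


if __name__ == "__main__":
    calls = []
    finish_lease(lambda m, l: calls.append(m) or "ready", "lease-1", 10, 100)
    print(calls)   # ['HttpNfcLeaseState', 'HttpNfcLeaseAbort']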
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1060.384935] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Uploaded image a40fdbaa-a783-493e-82aa-87bca322893c to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1060.389476] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1060.389912] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-65e3bc5e-eeb8-4aab-a4cd-fe636c932617 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.400214] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1060.400214] env[68244]: value = "task-2780883" [ 1060.400214] env[68244]: _type = "Task" [ 1060.400214] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.409900] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780883, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.447731] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f64f7b2e-4491-4e03-821e-37529f00aed0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.461317] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155efc6d-d4e4-4915-a595-7a6490fbc6e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.503356] env[68244]: DEBUG nova.compute.manager [req-909d5af9-29f3-4ef3-9ff9-2bc705b9fd72 req-de6ccbec-0a18-461c-8e10-1f7de94b7388 service nova] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Detach interface failed, port_id=98fde36b-000a-43da-ac75-ac997cb773c2, reason: Instance cd2c4986-2092-4bc5-94c6-222f036c5e83 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1060.541383] env[68244]: DEBUG nova.compute.utils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1060.546211] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Not allocating networking since 'none' was specified. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1060.626717] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0841a31-0e5c-4f1d-9ea7-aa1aaa62e49d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.652297] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9409ac0-5630-4b67-ae27-9a3447c8fafe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.662906] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.690668] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780882, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.875929] env[68244]: INFO nova.compute.manager [-] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Took 1.25 seconds to deallocate network for instance. [ 1060.922495] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780883, 'name': Destroy_Task} progress is 33%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.964936] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ce54d5-9766-455d-9443-41e0b890a28c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.973982] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa521d5a-c73f-4609-b6dd-2690edd3bafc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.009522] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b566f1-031e-4fb3-a18e-47adfa40c5c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.018875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c543f1-c1ff-4091-ae67-720c5b8590ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.034017] env[68244]: DEBUG nova.compute.provider_tree [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.046815] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1061.169441] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.169739] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-363e1778-2462-4a43-afac-12e9306fb302 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.178467] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1061.178467] env[68244]: value = "task-2780884" [ 1061.178467] env[68244]: _type = "Task" [ 1061.178467] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.191719] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780882, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617639} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.194774] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1061.195038] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1061.195412] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.195610] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ff7ad9e-5205-4b1b-b995-0dd586441b9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.204560] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1061.204560] env[68244]: value = "task-2780885" [ 1061.204560] env[68244]: _type = "Task" [ 1061.204560] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.216791] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780885, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.387609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.415034] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780883, 'name': Destroy_Task, 'duration_secs': 0.688468} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.415157] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Destroyed the VM [ 1061.416425] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1061.416761] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-30f28889-f6ad-42d6-b5b8-0a87fb289fac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.428173] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1061.428173] env[68244]: value = "task-2780886" [ 1061.428173] env[68244]: _type = "Task" [ 1061.428173] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.441518] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780886, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.537978] env[68244]: DEBUG nova.scheduler.client.report [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.693085] env[68244]: DEBUG oslo_vmware.api [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780884, 'name': PowerOnVM_Task, 'duration_secs': 0.472543} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.693355] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.693525] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-382ba295-09f5-4598-9c31-0512f8c483be tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance '9658b4e0-f4f9-4628-b700-19d94800961c' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1061.723704] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079067} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.723982] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1061.724839] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31918138-d15e-48ab-9e59-2dfef4af8a6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.753212] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1061.753995] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-585c547a-0bd4-488d-bc00-50d0163f0cf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.783741] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1061.783741] env[68244]: value = "task-2780887" [ 1061.783741] env[68244]: _type = "Task" [ 1061.783741] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.796010] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780887, 'name': ReconfigVM_Task} progress is 5%. 
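For instance df935885-c313-473d-aa3a-ba81aa999554 the spawn path above copies the cached VMDK into the instance directory (CopyVirtualDisk_Task), extends the root disk to the flavor size (ExtendVirtualDisk_Task, to 1048576 KB for a 1 GB root disk), and then reconfigures the VM to attach the disk (ReconfigVM_Task). The sketch below only captures that ordering; `run_task` is a hypothetical helper standing in for submitting a vCenter task and waiting on it.

# Sketch of the root-disk preparation order seen above: copy from the image
# cache, extend to the flavor's root size, then attach to the VM.
def prepare_root_disk(run_task, cache_vmdk, instance_vmdk, root_gb, vm_ref):
    run_task("CopyVirtualDisk_Task", source=cache_vmdk, dest=instance_vmdk)
    size_kb = root_gb * 1024 * 1024            # the log extends to 1048576 KB
    run_task("ExtendVirtualDisk_Task", name=instance_vmdk, new_capacity_kb=size_kb)
    run_task("ReconfigVM_Task", vm=vm_ref, attach=instance_vmdk)


if __name__ == "__main__":
    prepare_root_disk(lambda task, **kw: print(task, kw),
                      "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk",
                      "[datastore2] <uuid>/<uuid>.vmdk", root_gb=1, vm_ref="vm-123")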
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.938539] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780886, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.045721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.051216] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.493s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.051490] env[68244]: DEBUG nova.objects.instance [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lazy-loading 'resources' on Instance uuid 246e079b-9fc1-442f-9c20-4e0c05e152e3 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.061697] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1062.077786] env[68244]: INFO nova.scheduler.client.report [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 0c336f72-1cb9-468a-bf59-b0de937e1e94 [ 1062.097840] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.099017] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.099303] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 
tempest-ServerShowV254Test-1568122463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.099303] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.099443] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.099628] env[68244]: DEBUG nova.virt.hardware [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.100618] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bd8647-bc7e-4cb7-8fb5-a6d4972a54cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.111398] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e44cba-3918-44f8-abb1-7cf2422a7a5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.131932] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.138271] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Creating folder: Project (4b709fdeee34490cb0bc3daf763f7d4f). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.139045] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6735eb78-4eee-48cf-ae96-9b0a53d659b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.153443] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Created folder: Project (4b709fdeee34490cb0bc3daf763f7d4f) in parent group-v558876. [ 1062.153702] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Creating folder: Instances. Parent ref: group-v559104. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.154022] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78dcfa93-edda-4dc2-b777-a1396193e918 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.166109] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Created folder: Instances in parent group-v559104. [ 1062.168271] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.168271] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.168271] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dbe06fb-ec5e-40ea-875d-259d1f343ef9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.187220] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.187220] env[68244]: value = "task-2780890" [ 1062.187220] env[68244]: _type = "Task" [ 1062.187220] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.197659] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780890, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.297234] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.440620] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780886, 'name': RemoveSnapshot_Task} progress is 44%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.588840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1de05159-792c-4e81-a41c-70c6aceac3eb tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "0c336f72-1cb9-468a-bf59-b0de937e1e94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.501s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.702248] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780890, 'name': CreateVM_Task, 'duration_secs': 0.47967} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.705914] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.712610] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.713147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.713533] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.717353] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36cf1b2e-bc1f-4138-97dc-fd7e3bea3b6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.727595] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1062.727595] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52655db5-974f-6dad-86b1-a4ff2ffc7c0e" [ 1062.727595] env[68244]: _type = "Task" [ 1062.727595] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.738625] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52655db5-974f-6dad-86b1-a4ff2ffc7c0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.752857] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.754993] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.803076] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780887, 'name': ReconfigVM_Task, 'duration_secs': 0.584561} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.803076] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1062.803076] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-455795c4-0da5-4071-a7b7-096b143ff656 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.812063] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1062.812063] env[68244]: value = "task-2780891" [ 1062.812063] env[68244]: _type = "Task" [ 1062.812063] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.821277] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780891, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.883163] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.883240] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.883945] env[68244]: DEBUG nova.compute.manager [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.887959] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d9c283-3be6-4492-a9fe-e35f750c8806 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.897897] env[68244]: DEBUG nova.compute.manager [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1062.898581] env[68244]: DEBUG nova.objects.instance [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'flavor' on Instance uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.926078] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e463de-4ed6-4a7d-9884-7d1f9995d5d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.943897] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021f4368-8c0b-40a7-8e24-4f3c4c7399d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.946381] env[68244]: DEBUG oslo_vmware.api [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780886, 'name': RemoveSnapshot_Task, 'duration_secs': 1.043449} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.947786] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1062.947786] env[68244]: INFO nova.compute.manager [None req-c4988e40-539e-473a-98d9-ebb7bb55f49a tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Took 18.06 seconds to snapshot the instance on the hypervisor. [ 1062.983643] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9f648a-6ed2-4f65-be52-4580025279e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.993783] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb11f95c-32d0-46ee-ac25-7bb94559556b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.012260] env[68244]: DEBUG nova.compute.provider_tree [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.241911] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52655db5-974f-6dad-86b1-a4ff2ffc7c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.019312} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.242252] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.242482] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.242717] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.242853] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.243039] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.243304] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a51bfa8a-cf36-40f5-a4ec-529b8ffcc726 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.257779] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.257779] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.257779] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba21dd3b-1ae1-49ea-a5a5-048491368c8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.259277] env[68244]: DEBUG nova.compute.utils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1063.264564] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1063.264564] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5265198e-577c-eaac-e275-cf533716069e" [ 1063.264564] env[68244]: _type = "Task" [ 1063.264564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.273418] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5265198e-577c-eaac-e275-cf533716069e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.323213] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780891, 'name': Rename_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.388217] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "60c502f4-8c4b-433e-ad4f-9351048abe11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.388460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.388646] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.388870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.389055] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.391372] env[68244]: INFO nova.compute.manager [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Terminating instance [ 1063.515348] env[68244]: DEBUG nova.scheduler.client.report [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.762814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 
tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.776145] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5265198e-577c-eaac-e275-cf533716069e, 'name': SearchDatastore_Task, 'duration_secs': 0.017003} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.777053] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26a8a080-0fb2-4e57-810b-956cc409865f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.784200] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1063.784200] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a83c72-8e93-3632-a275-ba8ffb9af901" [ 1063.784200] env[68244]: _type = "Task" [ 1063.784200] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.795074] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a83c72-8e93-3632-a275-ba8ffb9af901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.823309] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780891, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.895443] env[68244]: DEBUG nova.compute.manager [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1063.895622] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.896594] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b476bc42-ebac-4870-b656-e84cf76a0bd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.905601] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.905892] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fc2923e-7e01-4462-9b2d-7859866768d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.907820] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.908061] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75cd4025-0cbe-4e55-a438-43a197eaf7d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.917232] env[68244]: DEBUG oslo_vmware.api [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1063.917232] env[68244]: value = "task-2780893" [ 1063.917232] env[68244]: _type = "Task" [ 1063.917232] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.918815] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1063.918815] env[68244]: value = "task-2780892" [ 1063.918815] env[68244]: _type = "Task" [ 1063.918815] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.934943] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.938728] env[68244]: DEBUG oslo_vmware.api [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780893, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.021204] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.024105] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.850s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.026101] env[68244]: INFO nova.compute.claims [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1064.043152] env[68244]: INFO nova.scheduler.client.report [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Deleted allocations for instance 246e079b-9fc1-442f-9c20-4e0c05e152e3 [ 1064.285227] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.285587] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.298938] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a83c72-8e93-3632-a275-ba8ffb9af901, 'name': SearchDatastore_Task, 'duration_secs': 0.014267} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.299839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.300186] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.300555] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6d5b833-3094-4e18-b9ae-f146c6dcc6c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.310535] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1064.310535] env[68244]: value = "task-2780894" [ 1064.310535] env[68244]: _type = "Task" [ 1064.310535] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.322774] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.326277] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780891, 'name': Rename_Task, 'duration_secs': 1.270798} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.326791] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.326791] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7547c8a1-a86f-459c-aa38-ec27147d32b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.334576] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1064.334576] env[68244]: value = "task-2780895" [ 1064.334576] env[68244]: _type = "Task" [ 1064.334576] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.348513] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.430134] env[68244]: DEBUG oslo_vmware.api [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780893, 'name': PowerOffVM_Task, 'duration_secs': 0.258722} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.430760] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.430967] env[68244]: DEBUG nova.compute.manager [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.431746] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798ed8f9-b19a-4ba8-bfbc-2ff8709b0000 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.437176] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780892, 'name': PowerOffVM_Task, 'duration_secs': 0.398168} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.437804] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.437979] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.438256] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9434a4a3-57f4-499f-bc16-c147f2996583 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.526192] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.526438] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.526650] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 60c502f4-8c4b-433e-ad4f-9351048abe11 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.526958] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a4684ad-9d48-40e5-a008-cb14bbcafb6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.537010] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1064.537010] env[68244]: value = "task-2780897" [ 1064.537010] env[68244]: _type = "Task" [ 1064.537010] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.546991] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780897, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.554177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9f586dc-9dfc-49f3-85ba-142d95d1e471 tempest-ServerTagsTestJSON-1541415858 tempest-ServerTagsTestJSON-1541415858-project-member] Lock "246e079b-9fc1-442f-9c20-4e0c05e152e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.405s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.791818] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.824739] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780894, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.843325] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.843542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.844565] env[68244]: INFO nova.compute.manager [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Attaching volume 26161298-d0e0-46f2-a061-361dc4332cca to /dev/sdb [ 1064.849531] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780895, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.885059] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fd0570-54ab-4e41-9b9b-9b60a38ce30b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.894561] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe964a21-472d-4bb7-b1ce-2b3524e9979c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.911606] env[68244]: DEBUG nova.virt.block_device [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating existing volume attachment record: 15c4db25-c390-41bc-abd5-22e9782354cb {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1064.947916] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8512e53c-7b01-425b-b865-d0dc37adfbef tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.048967] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780897, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.153976] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1065.154855] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03aa67f9-d736-4847-972d-e80ad3b856c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.161141] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1065.161520] env[68244]: ERROR oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk due to incomplete transfer. 
[ 1065.164054] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-07a8a91c-f1f3-4883-bf10-1c8526796fcd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.172670] env[68244]: DEBUG oslo_vmware.rw_handles [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525e7d8f-8198-5d13-a965-2315d316b886/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1065.172873] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Uploaded image a370b67b-27dc-4eb4-8e12-ce720e17af0d to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1065.176252] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1065.176547] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7c06f6cb-3f3f-4c05-a737-a69b398f4c06 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.189387] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1065.189387] env[68244]: value = "task-2780899" [ 1065.189387] env[68244]: _type = "Task" [ 1065.189387] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.197991] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780899, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.317270] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.325156] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709311} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.325897] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.325897] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.325897] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2350233-2925-4029-9173-7bd4b31b4663 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.337486] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1065.337486] env[68244]: value = "task-2780902" [ 1065.337486] env[68244]: _type = "Task" [ 1065.337486] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.356051] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780902, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.356623] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780895, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.370959] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70830ff1-520c-455c-99b2-a3e06fdd3a50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.380566] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1194126f-15cb-4133-9e85-ec4711b0a1b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.416224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3786bfea-624f-45ea-8456-78f348c45ba0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.425907] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57973a35-ab70-43a5-bf3b-b5e0022302a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.441636] env[68244]: DEBUG nova.compute.provider_tree [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.505415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.505677] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.505861] env[68244]: DEBUG nova.compute.manager [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Going to confirm migration 5 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1065.550361] env[68244]: DEBUG oslo_vmware.api [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2780897, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.538114} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.550769] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.550917] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.553141] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.553453] env[68244]: INFO nova.compute.manager [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1065.553769] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.554041] env[68244]: DEBUG nova.compute.manager [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.554174] env[68244]: DEBUG nova.network.neutron [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.697866] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780899, 'name': Destroy_Task, 'duration_secs': 0.456706} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.698167] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Destroyed the VM [ 1065.698403] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1065.698660] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-260ce35b-ccfc-42b9-8615-079d516cf086 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.712864] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1065.712864] env[68244]: value = "task-2780903" [ 1065.712864] env[68244]: _type = "Task" [ 1065.712864] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.728014] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780903, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.850837] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12936} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.854286] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.854286] env[68244]: DEBUG oslo_vmware.api [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2780895, 'name': PowerOnVM_Task, 'duration_secs': 1.02674} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.855024] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093efb84-15ef-4d52-a57b-f5f3465512ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.859216] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.859216] env[68244]: INFO nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Took 10.86 seconds to spawn the instance on the hypervisor. [ 1065.859216] env[68244]: DEBUG nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.860197] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d2753e-27a6-4cc9-a5bd-791804db096b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.885211] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.885587] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86562766-7a46-4f1a-98ae-2c1fb5a1a4b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.910193] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1065.910193] env[68244]: value = "task-2780904" [ 1065.910193] env[68244]: _type = "Task" [ 1065.910193] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.924709] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780904, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.944789] env[68244]: DEBUG nova.scheduler.client.report [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.016592] env[68244]: DEBUG nova.compute.manager [req-55c70f6e-d951-4583-a176-52a0cc3662d4 req-f6cd24f3-336a-4e81-b413-e5d9f2529056 service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Received event network-vif-deleted-096eaf97-e1ef-4622-b4ab-5300ed10d060 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1066.016856] env[68244]: INFO nova.compute.manager [req-55c70f6e-d951-4583-a176-52a0cc3662d4 req-f6cd24f3-336a-4e81-b413-e5d9f2529056 service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Neutron deleted interface 096eaf97-e1ef-4622-b4ab-5300ed10d060; detaching it from the instance and deleting it from the info cache [ 1066.017077] env[68244]: DEBUG nova.network.neutron [req-55c70f6e-d951-4583-a176-52a0cc3662d4 req-f6cd24f3-336a-4e81-b413-e5d9f2529056 service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.103262] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.103472] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.103718] env[68244]: DEBUG nova.network.neutron [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.103978] env[68244]: DEBUG nova.objects.instance [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'info_cache' on Instance uuid 9658b4e0-f4f9-4628-b700-19d94800961c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.214725] env[68244]: DEBUG nova.objects.instance [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'flavor' on Instance uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.227432] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780903, 'name': RemoveSnapshot_Task, 'duration_secs': 0.451296} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.228361] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1066.228952] env[68244]: DEBUG nova.compute.manager [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.230205] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aec01a-cd48-4455-a994-f183a130eecf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.394951] env[68244]: INFO nova.compute.manager [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Took 39.27 seconds to build instance. [ 1066.423941] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780904, 'name': ReconfigVM_Task, 'duration_secs': 0.349827} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.424580] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.425270] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf7864ff-1fe2-4850-a6fe-8dfad0d8a379 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.433318] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1066.433318] env[68244]: value = "task-2780905" [ 1066.433318] env[68244]: _type = "Task" [ 1066.433318] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.441606] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780905, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.449523] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.451125] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1066.452747] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.609s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.452973] env[68244]: DEBUG nova.objects.instance [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lazy-loading 'resources' on Instance uuid ffa17045-fadf-47d7-9c3b-19d0d54de3fc {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.472838] env[68244]: DEBUG nova.network.neutron [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.495952] env[68244]: DEBUG nova.compute.manager [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.497475] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab86d756-b91f-4610-96c6-5736ccab912e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.521038] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56f26175-139c-43c3-a4a5-313c523b16e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.534042] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccb2e1c-62d1-4988-a188-18b94ab183e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.576589] env[68244]: DEBUG nova.compute.manager 
[req-55c70f6e-d951-4583-a176-52a0cc3662d4 req-f6cd24f3-336a-4e81-b413-e5d9f2529056 service nova] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Detach interface failed, port_id=096eaf97-e1ef-4622-b4ab-5300ed10d060, reason: Instance 60c502f4-8c4b-433e-ad4f-9351048abe11 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1066.724222] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.724222] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.724222] env[68244]: DEBUG nova.network.neutron [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.724222] env[68244]: DEBUG nova.objects.instance [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'info_cache' on Instance uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.751508] env[68244]: INFO nova.compute.manager [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Shelve offloading [ 1066.899417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6c1a4974-6c4a-4989-9439-b50c5ba43dfe tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 40.791s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.943543] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780905, 'name': Rename_Task, 'duration_secs': 0.190852} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.944810] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.944810] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35a5e4d2-166d-45d4-8079-680395ab8694 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.953549] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1066.953549] env[68244]: value = "task-2780906" [ 1066.953549] env[68244]: _type = "Task" [ 1066.953549] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.959476] env[68244]: DEBUG nova.compute.utils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1066.963191] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1066.963371] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1066.972226] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.983214] env[68244]: INFO nova.compute.manager [-] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Took 1.43 seconds to deallocate network for instance. 
[ 1067.003560] env[68244]: DEBUG nova.policy [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a36a2057d8245ddb685bd9d1bcc19e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '713d71c9807247308f468c2ef7ede516', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1067.014791] env[68244]: INFO nova.compute.manager [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] instance snapshotting [ 1067.021845] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3877df21-1b6c-4dfc-ac2e-847bf702e28b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.052159] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812f2dd6-5abf-4efe-9322-a4d6353ecdc5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.232485] env[68244]: DEBUG nova.objects.base [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Object Instance<a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1> lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1067.256560] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1067.259072] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-354b4317-c78d-407f-8506-118eed9f4c16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.268509] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1067.268509] env[68244]: value = "task-2780907" [ 1067.268509] env[68244]: _type = "Task" [ 1067.268509] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.286124] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1067.286348] env[68244]: DEBUG nova.compute.manager [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.287183] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337c1527-99ce-42c5-a2ea-805830a57ac3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.294280] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.294508] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.294744] env[68244]: DEBUG nova.network.neutron [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.370148] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d34eb0f-942f-4a00-b388-8fe9cbc83787 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.376497] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb7f088-5f9e-40ba-ae22-79d72a093bbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.412786] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5b3475-2134-443d-aae4-a8738045ffd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.421517] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f149d36f-4d73-45aa-bfcd-15de3b329ce5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.437223] env[68244]: DEBUG nova.compute.provider_tree [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 
tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.464475] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1067.467268] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780906, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.488695] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.567957] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1067.568311] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce468ad9-d8d6-47d9-b9c0-f67bfcaa5354 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.577342] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1067.577342] env[68244]: value = "task-2780909" [ 1067.577342] env[68244]: _type = "Task" [ 1067.577342] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.584784] env[68244]: DEBUG nova.network.neutron [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [{"id": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "address": "fa:16:3e:cb:96:5a", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5c56d0b-95", "ovs_interfaceid": "c5c56d0b-9541-4af9-9b67-3e468da9557f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.589425] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780909, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.618420] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Successfully created port: c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1067.940520] env[68244]: DEBUG nova.scheduler.client.report [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.974132] env[68244]: DEBUG oslo_vmware.api [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780906, 'name': PowerOnVM_Task, 'duration_secs': 0.631953} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.975260] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.975511] env[68244]: INFO nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1067.975720] env[68244]: DEBUG nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.976559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d98e355-e1ba-43f9-9845-639010d9ccb5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.051274] env[68244]: DEBUG nova.compute.manager [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Received event network-changed-9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1068.051476] env[68244]: DEBUG nova.compute.manager [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Refreshing instance network info cache due to event network-changed-9389f00b-7d76-4743-9f6d-d9af08918ce6. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1068.051698] env[68244]: DEBUG oslo_concurrency.lockutils [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.051900] env[68244]: DEBUG oslo_concurrency.lockutils [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.052484] env[68244]: DEBUG nova.network.neutron [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Refreshing network info cache for port 9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.090847] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780909, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.091491] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-9658b4e0-f4f9-4628-b700-19d94800961c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.091808] env[68244]: DEBUG nova.objects.instance [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'migration_context' on Instance uuid 9658b4e0-f4f9-4628-b700-19d94800961c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.442798] env[68244]: DEBUG nova.network.neutron [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.445999] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.448294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.032s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.448601] env[68244]: DEBUG nova.objects.instance [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] 
Lazy-loading 'resources' on Instance uuid fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.453147] env[68244]: DEBUG nova.network.neutron [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.469129] env[68244]: INFO nova.scheduler.client.report [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Deleted allocations for instance ffa17045-fadf-47d7-9c3b-19d0d54de3fc [ 1068.476882] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1068.498207] env[68244]: INFO nova.compute.manager [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Took 34.63 seconds to build instance. 
[ 1068.505372] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=<?>,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:18:49Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1068.506412] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.506412] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1068.506412] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.506412] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1068.506412] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1068.506670] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1068.506711] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1068.507513] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 
tempest-ServerRescueTestJSON-2081540688-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1068.507513] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1068.507513] env[68244]: DEBUG nova.virt.hardware [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1068.508353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52086563-9178-422b-a28a-2f06967ed78e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.519128] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eac0c3b-9459-408a-83ae-e70281cee63e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.588521] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780909, 'name': CreateSnapshot_Task, 'duration_secs': 0.977347} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.591339] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1068.592218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f4c5bc-1185-456f-8a0c-70f05ac815b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.596032] env[68244]: DEBUG nova.objects.base [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Object Instance<9658b4e0-f4f9-4628-b700-19d94800961c> lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1068.596842] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66de2362-b073-407e-870b-2a864f4da223 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.625628] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f8d19f7-bcfc-42f5-82c3-be3e9567d796 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.632358] env[68244]: DEBUG oslo_vmware.api [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 
tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1068.632358] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a4740-d269-33f5-3ee7-c7421baffb54" [ 1068.632358] env[68244]: _type = "Task" [ 1068.632358] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.643615] env[68244]: DEBUG oslo_vmware.api [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a4740-d269-33f5-3ee7-c7421baffb54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.793524] env[68244]: DEBUG nova.network.neutron [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updated VIF entry in instance network info cache for port 9389f00b-7d76-4743-9f6d-d9af08918ce6. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.793897] env[68244]: DEBUG nova.network.neutron [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.945621] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.955787] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.976601] env[68244]: DEBUG oslo_concurrency.lockutils [None req-257960ff-95e0-4041-9602-ebafc3b493b6 tempest-ServersWithSpecificFlavorTestJSON-1668795449 tempest-ServersWithSpecificFlavorTestJSON-1668795449-project-member] Lock "ffa17045-fadf-47d7-9c3b-19d0d54de3fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.098s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.000544] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d90c0227-194a-4eee-8e68-d6e5f116efd2 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "477da9d1-8550-48be-b243-519b4f0ca443" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.146s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.133257] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1069.136707] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fa3778d6-0608-442c-9292-c4ed61935466 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.151046] env[68244]: DEBUG oslo_vmware.api [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a4740-d269-33f5-3ee7-c7421baffb54, 'name': SearchDatastore_Task, 'duration_secs': 0.015749} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.155060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.155552] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1069.155552] env[68244]: value = "task-2780910" [ 1069.155552] env[68244]: _type = "Task" [ 1069.155552] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.167611] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780910, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.254691] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c91fd5c-c7f5-4989-b7a5-58eaad454d05 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.263418] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecc7d4a-ae16-465d-b5d7-c4441e92a861 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.301067] env[68244]: DEBUG oslo_concurrency.lockutils [req-4ce5f5f1-ebd7-481e-accb-75f6488c5ff4 req-82cb11ac-7c82-40ae-b636-12f07f43c222 service nova] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.302517] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffca8685-44bf-4259-bb2d-961d5a9d8884 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.310898] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40adacc7-be3f-4718-bc87-079fab94442e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.317151] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.317956] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a877063-1b0f-4fe2-bd24-a8a3560d9110 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.332530] env[68244]: DEBUG nova.compute.provider_tree [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.336165] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.336681] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-528cea45-e7df-48f8-b388-df7ad7ac9682 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.419143] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.420396] env[68244]: DEBUG 
nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.420396] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.420396] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70ea2fe7-6b6f-4444-a5e1-2d7484433882 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.428116] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1069.428116] env[68244]: value = "task-2780912" [ 1069.428116] env[68244]: _type = "Task" [ 1069.428116] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.437526] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.464487] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1069.464826] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559108', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'name': 'volume-26161298-d0e0-46f2-a061-361dc4332cca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cedcff81-0010-4fa6-95bf-72a4dcac5427', 'attached_at': '', 'detached_at': '', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'serial': '26161298-d0e0-46f2-a061-361dc4332cca'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1069.465823] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2ce8f6-fef5-489e-a8dd-230727d3326b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.483996] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e003fe03-5d76-49be-8b22-3bf38db46e45 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.510333] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] volume-26161298-d0e0-46f2-a061-361dc4332cca/volume-26161298-d0e0-46f2-a061-361dc4332cca.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1069.510745] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b97be17-cb05-465b-b6d7-586d76d72abe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.530582] env[68244]: INFO nova.compute.manager [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Rebuilding instance [ 1069.532852] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1069.532852] env[68244]: value = "task-2780913" [ 1069.532852] env[68244]: _type = "Task" [ 1069.532852] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.542564] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780913, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.584678] env[68244]: DEBUG nova.compute.manager [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.585458] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc06126e-0805-459b-93e7-bbf226e3761e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.661580] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Successfully updated port: c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1069.667450] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780910, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.838455] env[68244]: DEBUG nova.scheduler.client.report [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.938616] env[68244]: DEBUG oslo_vmware.api [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2780912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189631} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.938878] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.939060] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.939239] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.950876] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.951174] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b002c43d-15f7-4b81-9baa-a65758d19bbd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.960232] env[68244]: INFO nova.scheduler.client.report [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted allocations for instance d46f6695-7a96-4e0b-b43a-236bcb4ec519 [ 1069.964115] env[68244]: DEBUG oslo_vmware.api [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1069.964115] env[68244]: value = "task-2780914" [ 1069.964115] env[68244]: _type = "Task" [ 1069.964115] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.974658] env[68244]: DEBUG oslo_vmware.api [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780914, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.044362] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780913, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.088359] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-vif-unplugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1070.088586] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.088800] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.088983] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.089168] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] No waiting events found dispatching network-vif-unplugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1070.089995] env[68244]: WARNING nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received unexpected event network-vif-unplugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed for instance with vm_state shelved_offloaded and task_state None. [ 1070.089995] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1070.089995] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing instance network info cache due to event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1070.089995] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.090405] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.090405] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.167287] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780910, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.167761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.167896] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.168047] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1070.343471] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.345978] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.318s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.346248] env[68244]: DEBUG nova.objects.instance [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 
tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lazy-loading 'resources' on Instance uuid cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.370847] env[68244]: INFO nova.scheduler.client.report [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Deleted allocations for instance fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f [ 1070.466825] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.479766] env[68244]: DEBUG oslo_vmware.api [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2780914, 'name': PowerOnVM_Task, 'duration_secs': 0.469217} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.480211] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.480542] env[68244]: DEBUG nova.compute.manager [None req-0334a41c-14e4-42f4-b2e9-fb26fdf4d4ec tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.481750] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf79d4a-95c0-4344-ac37-f2e5a7cace62 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.548065] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780913, 'name': ReconfigVM_Task, 'duration_secs': 0.593096} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.548565] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfigured VM instance instance-00000038 to attach disk [datastore2] volume-26161298-d0e0-46f2-a061-361dc4332cca/volume-26161298-d0e0-46f2-a061-361dc4332cca.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1070.553659] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c84c9b53-765b-4e0c-9ead-1ec42e85b614 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.571790] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1070.571790] env[68244]: value = "task-2780915" [ 1070.571790] env[68244]: _type = "Task" [ 1070.571790] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.581201] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780915, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.607054] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.607054] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5eeac4c-3d66-4e18-84b2-253195450758 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.617020] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1070.617020] env[68244]: value = "task-2780916" [ 1070.617020] env[68244]: _type = "Task" [ 1070.617020] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.626096] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.670020] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780910, 'name': CloneVM_Task, 'duration_secs': 1.165355} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.670020] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Created linked-clone VM from snapshot [ 1070.670020] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d396cb-85d6-4a2e-9bef-5824f964ca3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.679566] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Uploading image ab0db89c-f033-43a3-b630-13407991becf {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1070.709761] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1070.709761] env[68244]: value = "vm-559110" [ 1070.709761] env[68244]: _type = "VirtualMachine" [ 1070.709761] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1070.711018] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f772ee9d-3906-4d06-94e2-5fe1b4e34473 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.722147] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1070.724150] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease: (returnval){ [ 1070.724150] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52347f4a-4c29-89ac-f061-4fcb4ab50817" [ 1070.724150] env[68244]: _type = "HttpNfcLease" [ 1070.724150] env[68244]: } obtained for exporting VM: (result){ [ 1070.724150] env[68244]: value = "vm-559110" [ 1070.724150] env[68244]: _type = "VirtualMachine" [ 1070.724150] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1070.724771] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the lease: (returnval){ [ 1070.724771] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52347f4a-4c29-89ac-f061-4fcb4ab50817" [ 1070.724771] env[68244]: _type = "HttpNfcLease" [ 1070.724771] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1070.732805] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.732805] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52347f4a-4c29-89ac-f061-4fcb4ab50817" [ 1070.732805] env[68244]: _type = "HttpNfcLease" [ 1070.732805] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1070.880413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9ad7342-1e98-4061-bd44-278440497159 tempest-ImagesTestJSON-917860625 tempest-ImagesTestJSON-917860625-project-member] Lock "fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.105s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.987402] env[68244]: DEBUG nova.network.neutron [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Updating instance_info_cache with network_info: [{"id": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "address": "fa:16:3e:af:a3:9c", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01b444b-a4", "ovs_interfaceid": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.051758] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updated VIF entry in instance network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.052151] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.084575] env[68244]: DEBUG oslo_vmware.api [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780915, 'name': ReconfigVM_Task, 'duration_secs': 0.236004} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.084575] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559108', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'name': 'volume-26161298-d0e0-46f2-a061-361dc4332cca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cedcff81-0010-4fa6-95bf-72a4dcac5427', 'attached_at': '', 'detached_at': '', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'serial': '26161298-d0e0-46f2-a061-361dc4332cca'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1071.128115] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780916, 'name': PowerOffVM_Task, 'duration_secs': 0.21192} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.130916] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.131191] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.132248] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcff8088-0328-48d8-94ac-7945707066bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.141395] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.141895] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02bec7cc-56db-4fad-a1f1-47a88a83c13e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.159288] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21eca3e-f423-49fa-8a86-9f13a81361c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.167955] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597a9750-ed8c-48ab-b363-6f9590efb167 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.205157] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c012b0-cbf8-4dfa-a1b3-2b5ccbd9e3eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.208262] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.208590] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.208745] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Deleting the datastore file [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443 {{(pid=68244) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.209040] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92cf4381-fe07-4513-ab11-f40e4151e7ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.218553] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b397d80-b8bd-4ebc-bd4b-e5db27a90e61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.223831] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1071.223831] env[68244]: value = "task-2780919" [ 1071.223831] env[68244]: _type = "Task" [ 1071.223831] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.237047] env[68244]: DEBUG nova.compute.provider_tree [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.249043] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.251640] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1071.251640] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52347f4a-4c29-89ac-f061-4fcb4ab50817" [ 1071.251640] env[68244]: _type = "HttpNfcLease" [ 1071.251640] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1071.251875] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1071.251875] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52347f4a-4c29-89ac-f061-4fcb4ab50817" [ 1071.251875] env[68244]: _type = "HttpNfcLease" [ 1071.251875] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1071.252842] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461dfabd-1d50-4dda-9f63-cd36ebbc1328 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.261885] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk from lease info. 
{{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1071.262092] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1071.390621] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5d3b2fed-b44a-4184-bd9e-adf7b407b448 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.490212] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.490580] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Instance network_info: |[{"id": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "address": "fa:16:3e:af:a3:9c", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01b444b-a4", "ovs_interfaceid": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1071.491043] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:a3:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c01b444b-a422-42c6-8ac3-cf0ce71f1d20', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.499745] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 
tempest-ServerRescueTestJSON-2081540688-project-member] Creating folder: Project (713d71c9807247308f468c2ef7ede516). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1071.500301] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96dcfd9d-c087-484a-8a3e-d9a5e8f3df0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.514277] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created folder: Project (713d71c9807247308f468c2ef7ede516) in parent group-v558876. [ 1071.518030] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Creating folder: Instances. Parent ref: group-v559111. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1071.518342] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-054f96bf-629c-4b0a-b0c3-102abac5354d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.530400] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created folder: Instances in parent group-v559111. [ 1071.530789] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.531362] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1071.531832] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb072f86-b8b0-4122-af20-a1415a8fc0fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.555422] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.555581] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Received event network-vif-plugged-c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1071.555731] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquiring lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.555896] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.556071] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.556237] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] No waiting events found dispatching network-vif-plugged-c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1071.556529] env[68244]: WARNING nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Received unexpected event network-vif-plugged-c01b444b-a422-42c6-8ac3-cf0ce71f1d20 for instance with vm_state building and task_state spawning. 
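Note: nearly every vCenter operation recorded in this trace (CreateSnapshot_Task, CloneVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task, PowerOffVM_Task and the CreateVM_Task polled below) follows the same wait-for-task pattern: the driver invokes the API, receives a task reference, and oslo_vmware.api polls it until completion, which is where the repeated "Task: {...} progress is N%" and "completed successfully" records come from. The following is a minimal, self-contained sketch of that polling loop under stated assumptions; fetch_task_info() is a hypothetical stand-in for the real oslo_vmware task lookup, not an actual interface, and the real logic lives in oslo_vmware.api.wait_for_task / _poll_task as the source locators in the records show.

# Hypothetical sketch of the wait-for-task pattern seen throughout this log.
# The real implementation is oslo_vmware.api (wait_for_task / _poll_task);
# the fetch_task_info callable below is an assumed stand-in, not a real API.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(fetch_task_info, interval=0.5):
    """Poll a task until it reaches a terminal state.

    fetch_task_info: callable returning a dict with a 'state' key
    ('queued', 'running', 'success' or 'error') plus optional
    'progress', 'result' and 'error' keys.
    """
    while True:
        info = fetch_task_info()
        state = info.get("state")
        if state == "success":
            # Mirrors the "... completed successfully." records above.
            return info.get("result")
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Queued or still running: report progress and poll again,
        # which is what produces the "progress is 0% ... 99%" lines.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)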
[ 1071.556570] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Received event network-changed-c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1071.560040] env[68244]: DEBUG nova.compute.manager [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Refreshing instance network info cache due to event network-changed-c01b444b-a422-42c6-8ac3-cf0ce71f1d20. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1071.560040] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquiring lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.560040] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Acquired lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.560040] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Refreshing network info cache for port c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1071.560040] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.560040] env[68244]: value = "task-2780922" [ 1071.560040] env[68244]: _type = "Task" [ 1071.560040] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.569684] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780922, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.736495] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271164} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.737378] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.737378] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.737378] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.741160] env[68244]: DEBUG nova.scheduler.client.report [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.074636] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780922, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.130469] env[68244]: DEBUG nova.objects.instance [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'flavor' on Instance uuid cedcff81-0010-4fa6-95bf-72a4dcac5427 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.251626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.254532] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.606s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.293752] env[68244]: INFO nova.scheduler.client.report [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Deleted allocations for instance cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b [ 1072.312595] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Updated VIF entry in instance network info cache for port c01b444b-a422-42c6-8ac3-cf0ce71f1d20. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1072.314756] env[68244]: DEBUG nova.network.neutron [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Updating instance_info_cache with network_info: [{"id": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "address": "fa:16:3e:af:a3:9c", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01b444b-a4", "ovs_interfaceid": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.373418] env[68244]: INFO nova.compute.manager [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Rescuing [ 1072.374129] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.374521] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.374859] env[68244]: DEBUG nova.network.neutron [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1072.532761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.575039] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780922, 
'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.635599] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d24cc796-b82e-4a25-aa9a-dde59cdc90ed tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.792s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.792790] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.793427] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.794102] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.794102] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.794269] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.794358] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.794998] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.794998] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.795144] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.795236] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.795558] env[68244]: DEBUG nova.virt.hardware [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.796602] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bec893-d507-48a3-afee-ec2d682d4f07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.809859] env[68244]: DEBUG oslo_concurrency.lockutils [None req-34a06802-5d64-42f8-aa4a-0e11d32a73d9 tempest-ServersAdminTestJSON-150758922 tempest-ServersAdminTestJSON-150758922-project-member] Lock "cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.029s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.814533] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b12f649-4a22-41cb-aace-eb6761be77e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.817669] env[68244]: DEBUG oslo_concurrency.lockutils [req-13d30aa6-83c0-4bdb-8fda-0917b76d7988 req-6df483f7-b55f-4193-9390-1f7fb7393471 service nova] Releasing lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.830436] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.836486] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1072.837243] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1072.837571] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97966a2f-917e-4c1d-a68e-08c01f1971a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.859514] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.859514] env[68244]: value = "task-2780923" [ 1072.859514] env[68244]: _type = "Task" [ 1072.859514] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.870686] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780923, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.076311] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780922, 'name': CreateVM_Task, 'duration_secs': 1.511532} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.076732] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.077440] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.077687] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.078312] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.078537] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-350a1c1b-e78f-436f-a93c-e776aa23ea2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.083633] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1073.083633] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bf6f9b-835f-0767-1e0c-392097f50491" [ 1073.083633] env[68244]: _type = "Task" [ 1073.083633] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.093124] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bf6f9b-835f-0767-1e0c-392097f50491, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.281721] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance 9658b4e0-f4f9-4628-b700-19d94800961c as it has an incoming, in-progress migration 9d3432d6-d1b5-4178-bedc-2e9fe7a3754c. Migration status is confirming {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1073.283033] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating resource usage from migration 9d3432d6-d1b5-4178-bedc-2e9fe7a3754c [ 1073.321524] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2aacd21f-d664-4267-8331-d3862f43d35b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.321764] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.322069] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7778c027-d4af-436c-a545-aa513c0b1127 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cedcff81-0010-4fa6-95bf-72a4dcac5427 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 60c502f4-8c4b-433e-ad4f-9351048abe11 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance b84c2c08-651a-407d-89dd-177bc5d90313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f2e57bf9-05ee-49d8-846d-c3bf5920ae96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323049] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance cd2c4986-2092-4bc5-94c6-222f036c5e83 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1073.323049] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration 9d3432d6-d1b5-4178-bedc-2e9fe7a3754c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1073.323358] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 9658b4e0-f4f9-4628-b700-19d94800961c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323358] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance df935885-c313-473d-aa3a-ba81aa999554 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.323430] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 477da9d1-8550-48be-b243-519b4f0ca443 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.378467] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780923, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.596669] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bf6f9b-835f-0767-1e0c-392097f50491, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.597000] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.597325] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1073.597530] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.597686] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.597876] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.598436] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e9adaad-d4cd-4aeb-8495-fed9bc36d960 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.609472] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.611269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1073.612365] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d26b4cfb-9354-4a54-9914-34bcb850231c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.619855] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1073.619855] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526f0451-baab-70a2-4d7a-06a354f86731" [ 1073.619855] env[68244]: _type = "Task" [ 1073.619855] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.629867] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526f0451-baab-70a2-4d7a-06a354f86731, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.654906] env[68244]: DEBUG nova.network.neutron [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.828583] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ed5b8ba3-c8f0-468f-85d1-f36179bfef32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1073.828583] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a50d505f-92f2-4759-ab8f-1bf4c9708b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1073.875023] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780923, 'name': CreateVM_Task, 'duration_secs': 0.638582} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.875023] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.875023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.875023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.875023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.875023] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6acd5e0e-c6eb-4c77-b7e3-bf8076c14e2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.880913] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1073.880913] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521c3676-8128-9ec6-f8e6-23dd5a81e53a" [ 1073.880913] env[68244]: _type = "Task" [ 1073.880913] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.890316] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521c3676-8128-9ec6-f8e6-23dd5a81e53a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.136810] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526f0451-baab-70a2-4d7a-06a354f86731, 'name': SearchDatastore_Task, 'duration_secs': 0.01058} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.136810] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d969a88-f2a5-4922-a66e-44c7e7d0e424 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.144181] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1074.144181] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520f3cb6-0dbf-a543-4fed-a963ca913afd" [ 1074.144181] env[68244]: _type = "Task" [ 1074.144181] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.150654] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520f3cb6-0dbf-a543-4fed-a963ca913afd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.158708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.332512] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance fc75039c-f2d0-4d4b-9a82-b605b6ba63d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.332907] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (b885cb16-3bd4-46d8-abd9-28a1bf1058e3): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1074.333298] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1074.339194] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1074.402172] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521c3676-8128-9ec6-f8e6-23dd5a81e53a, 'name': SearchDatastore_Task, 'duration_secs': 0.014692} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.402172] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.402172] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.402401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.664207] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520f3cb6-0dbf-a543-4fed-a963ca913afd, 'name': SearchDatastore_Task, 'duration_secs': 0.033373} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.664968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.664968] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/a50d505f-92f2-4759-ab8f-1bf4c9708b1a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1074.668316] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.668710] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.668886] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7deedf5e-ff3c-4c19-ae3a-10de595d37d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.671830] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4940a2af-0436-4a23-ab1f-37e92d46052c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.678294] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2264215-be11-42c0-b565-b84d759cb61a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.683877] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1074.683877] env[68244]: value = "task-2780924" [ 1074.683877] env[68244]: _type = "Task" [ 1074.683877] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.690553] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea67232-3687-4719-a870-8649d9ad628c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.700689] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.700882] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.701817] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.705021] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6962ba0c-90b2-46ea-adb0-bbc3a0658e0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.733461] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd733963-9181-412f-8b55-bb2bc8799ebb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.736660] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1074.736660] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52829496-11ca-3f40-2e98-8801d7f7ec6e" [ 1074.736660] env[68244]: _type = "Task" [ 1074.736660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.745365] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e582573-5dd0-4328-b0a6-7193b6344656 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.753941] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52829496-11ca-3f40-2e98-8801d7f7ec6e, 'name': SearchDatastore_Task, 'duration_secs': 0.028979} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.754561] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ff1f21b-3e0e-4a26-9cb3-591c2df830f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.765740] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.773718] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1074.773718] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529ae140-eb4f-587e-07cf-c58d5bfc6d76" [ 1074.773718] env[68244]: _type = "Task" [ 1074.773718] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.783153] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529ae140-eb4f-587e-07cf-c58d5bfc6d76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.207602] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780924, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.270274] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.287037] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529ae140-eb4f-587e-07cf-c58d5bfc6d76, 'name': SearchDatastore_Task, 'duration_secs': 0.016694} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.287350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.287730] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.288127] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cef7ccd7-11d1-49f9-b00f-83cd2b836837 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.302746] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1075.302746] env[68244]: value = "task-2780925" [ 1075.302746] env[68244]: _type = "Task" [ 1075.302746] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.316715] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.700530] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.782137} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.700825] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/a50d505f-92f2-4759-ab8f-1bf4c9708b1a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1075.701165] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1075.701548] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f477d19-35f2-4fbb-82c0-0e78f91f88c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.713707] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.714375] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1075.714375] env[68244]: value = "task-2780926" [ 1075.714375] env[68244]: _type = "Task" [ 1075.714375] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.714375] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e95564d0-5ef8-4978-bf89-0319431ff173 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.731054] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.733101] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1075.733101] env[68244]: value = "task-2780927" [ 1075.733101] env[68244]: _type = "Task" [ 1075.733101] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.744493] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780927, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.782101] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1075.782101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.527s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.782217] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.672s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.784260] env[68244]: DEBUG nova.objects.instance [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'pci_requests' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.816636] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780925, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.230156] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261777} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.230721] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.231784] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e086ef1e-4730-4394-a23d-754023e7eed2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.260793] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/a50d505f-92f2-4759-ab8f-1bf4c9708b1a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.264541] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caf18a07-9e4e-4451-9107-cba44ca8cb67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.279824] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780927, 'name': PowerOffVM_Task, 'duration_secs': 0.40961} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.280677] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.282267] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156ce740-704e-4592-86e0-9ca2b2b04adf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.287042] env[68244]: DEBUG nova.objects.instance [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'numa_topology' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.290340] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1076.290340] env[68244]: value = "task-2780928" [ 1076.290340] env[68244]: _type = "Task" [ 1076.290340] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.320884] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c814e3-aa50-4635-9266-6a49dbbbb7af {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.328195] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.336965] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695695} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.341311] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.341311] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.341311] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3936e352-2c24-4b72-8cee-a92588d706f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.351230] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1076.351230] env[68244]: value = "task-2780929" [ 1076.351230] env[68244]: _type = "Task" [ 1076.351230] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.363779] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780929, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.376036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.376492] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf035054-3ad0-435f-8923-86bdf67688dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.386534] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1076.386534] env[68244]: value = "task-2780930" [ 1076.386534] env[68244]: _type = "Task" [ 1076.386534] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.399028] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1076.399287] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.399589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.399807] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.400081] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.400393] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03165f3d-4a33-44c4-b864-2416b0ff0e66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1076.411161] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.411359] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.412765] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-869f7aff-6c94-40f2-b9a0-304e3a91d62c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.419302] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1076.419302] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529257f5-525f-4168-fba8-65c79168b2d0" [ 1076.419302] env[68244]: _type = "Task" [ 1076.419302] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.429209] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529257f5-525f-4168-fba8-65c79168b2d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.792298] env[68244]: INFO nova.compute.claims [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.811032] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780928, 'name': ReconfigVM_Task, 'duration_secs': 0.410534} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.811032] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Reconfigured VM instance instance-00000056 to attach disk [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/a50d505f-92f2-4759-ab8f-1bf4c9708b1a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.811475] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2725d636-7a89-45e2-860a-a98bee8a3164 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.823112] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1076.823112] env[68244]: value = "task-2780931" [ 1076.823112] env[68244]: _type = "Task" [ 1076.823112] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.833325] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780931, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.866275] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085902} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.866605] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.867700] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330223a6-ea4c-4308-8a7e-eaee84c85038 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.901847] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.902899] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ed82c79-2f50-464f-807c-c1dd57aad6c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.938990] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529257f5-525f-4168-fba8-65c79168b2d0, 'name': SearchDatastore_Task, 'duration_secs': 0.013254} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.939292] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1076.939292] env[68244]: value = "task-2780932" [ 1076.939292] env[68244]: _type = "Task" [ 1076.939292] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.940356] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14b8d30f-2370-4f2a-9cfd-9d0d90a0e810 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.949723] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1076.949723] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52494fd6-877e-799d-c914-463a33df22fb" [ 1076.949723] env[68244]: _type = "Task" [ 1076.949723] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.954456] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780932, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.963988] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52494fd6-877e-799d-c914-463a33df22fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.335821] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780931, 'name': Rename_Task, 'duration_secs': 0.416234} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.336237] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.336513] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e35ce7b4-0a13-4da4-b431-8d1adf7df09d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.344469] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1077.344469] env[68244]: value = "task-2780933" [ 1077.344469] env[68244]: _type = "Task" [ 1077.344469] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.355216] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.451802] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.464996] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52494fd6-877e-799d-c914-463a33df22fb, 'name': SearchDatastore_Task, 'duration_secs': 0.031347} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.465634] env[68244]: DEBUG oslo_concurrency.lockutils [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.465634] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1077.465923] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49b21dbc-88c2-485e-9c40-4ba65931f5da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.474989] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1077.474989] env[68244]: value = "task-2780934" [ 1077.474989] env[68244]: _type = "Task" [ 1077.474989] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.485084] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.859885] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780933, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.958528] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780932, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.991045] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780934, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.145377] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d662a2-d3c6-4190-9c65-6399d3c966c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.161024] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ea0cfc-b893-4045-992c-685432eacdcb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.194993] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe0094a-bbb8-4e6c-ba9d-58c8791e4a21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.206361] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6cb97d-58ef-468d-ae62-3591972f6cfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.225556] env[68244]: DEBUG nova.compute.provider_tree [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.361414] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780933, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.452867] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780932, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.487071] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666416} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.488148] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. 
[ 1078.489175] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3380d1-d3fe-4d44-82ce-bb1920c575a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.524072] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.524072] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-111c5a98-324c-4de1-8cd5-dfc67e2069a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.550476] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1078.550476] env[68244]: value = "task-2780935" [ 1078.550476] env[68244]: _type = "Task" [ 1078.550476] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.563839] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780935, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.732354] env[68244]: DEBUG nova.scheduler.client.report [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.858268] env[68244]: DEBUG oslo_vmware.api [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780933, 'name': PowerOnVM_Task, 'duration_secs': 1.134566} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.858556] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.862146] env[68244]: INFO nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Took 10.38 seconds to spawn the instance on the hypervisor. [ 1078.862146] env[68244]: DEBUG nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.862146] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0874aa1d-9467-4309-8baa-32d09be23f95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.961447] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780932, 'name': ReconfigVM_Task, 'duration_secs': 1.693035} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.961793] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443/477da9d1-8550-48be-b243-519b4f0ca443.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.962821] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdac0731-77bb-41df-9acf-48c8944fdb92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.972056] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1078.972056] env[68244]: value = "task-2780936" [ 1078.972056] env[68244]: _type = "Task" [ 1078.972056] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.989030] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780936, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.065559] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780935, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.238670] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.456s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.241331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.854s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.241557] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.247923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.929s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.248307] env[68244]: INFO nova.compute.claims [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.285733] env[68244]: INFO nova.scheduler.client.report [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Deleted allocations for instance cd2c4986-2092-4bc5-94c6-222f036c5e83 [ 1079.359551] env[68244]: INFO nova.network.neutron [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating port abbd3e34-9461-4503-86ee-598fe02a65d3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1079.387029] env[68244]: INFO nova.compute.manager [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Took 30.23 seconds to build instance. 
[ 1079.485408] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780936, 'name': Rename_Task, 'duration_secs': 0.328446} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.485725] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1079.485940] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ffcf45d-b51b-4d9b-9864-69086b0573d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.494354] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1079.494354] env[68244]: value = "task-2780937" [ 1079.494354] env[68244]: _type = "Task" [ 1079.494354] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.507668] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780937, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.566651] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780935, 'name': ReconfigVM_Task, 'duration_secs': 0.576073} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.567199] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfigured VM instance instance-00000038 to attach disk [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.568152] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88640c83-c42a-409f-93e9-3fc462369ae5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.603181] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e075b122-f55a-4a03-9fbb-4999743216c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.622794] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1079.622794] env[68244]: value = "task-2780938" [ 1079.622794] env[68244]: _type = "Task" [ 1079.622794] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.634377] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780938, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.802123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8860730a-eb00-49a9-af75-6ac9ba5a2101 tempest-ServerDiagnosticsNegativeTest-609288804 tempest-ServerDiagnosticsNegativeTest-609288804-project-member] Lock "cd2c4986-2092-4bc5-94c6-222f036c5e83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.855s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.888743] env[68244]: DEBUG oslo_concurrency.lockutils [None req-715ce219-f28b-413c-8fb8-605065a6a6ff tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.741s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.965869] env[68244]: INFO nova.compute.manager [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Rescuing [ 1079.966292] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.966811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.966811] env[68244]: DEBUG nova.network.neutron [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.009235] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780937, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.135543] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780938, 'name': ReconfigVM_Task, 'duration_secs': 0.283464} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.136526] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.136797] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5cc851e-e4f2-4bee-af75-7892a0b4b5bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.146797] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1080.146797] env[68244]: value = "task-2780939" [ 1080.146797] env[68244]: _type = "Task" [ 1080.146797] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.162016] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.509553] env[68244]: DEBUG oslo_vmware.api [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780937, 'name': PowerOnVM_Task, 'duration_secs': 0.519915} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.510909] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.510909] env[68244]: DEBUG nova.compute.manager [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.513644] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444e4a49-f4c6-4024-abd8-b69e0c7df0c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.615717] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72bebd0-8e54-4751-8ba4-113d4cb0b49e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.625388] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dd4a15-ec6b-4ac2-85f2-a7e04b06f79d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.665676] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc4a173-3fc4-41e8-af18-05a2dab28fef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.680104] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37c29c6-1a0f-40fa-a565-8b4b49a10791 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.685121] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780939, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.699927] env[68244]: DEBUG nova.compute.provider_tree [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.998919] env[68244]: DEBUG nova.network.neutron [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Updating instance_info_cache with network_info: [{"id": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "address": "fa:16:3e:af:a3:9c", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01b444b-a4", "ovs_interfaceid": "c01b444b-a422-42c6-8ac3-cf0ce71f1d20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.049478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.124901] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1081.128053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d553027-a85e-44b8-b44d-5c40324f8767 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.137353] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk is in state: ready. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1081.137622] env[68244]: ERROR oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk due to incomplete transfer. [ 1081.137854] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3fb66871-5597-4a96-85ea-16d119585636 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.148120] env[68244]: DEBUG oslo_vmware.rw_handles [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5252ab72-2493-6e11-0b0b-6227f44b80b7/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1081.148120] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Uploaded image ab0db89c-f033-43a3-b630-13407991becf to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1081.150088] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1081.150380] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-001e21b7-77b4-4ef1-aed6-815e60562636 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.161257] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1081.161257] env[68244]: value = "task-2780940" [ 1081.161257] env[68244]: _type = "Task" [ 1081.161257] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.179622] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780940, 'name': Destroy_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.179919] env[68244]: DEBUG oslo_vmware.api [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780939, 'name': PowerOnVM_Task, 'duration_secs': 0.572348} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.180180] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1081.185703] env[68244]: DEBUG nova.compute.manager [None req-846eae60-ca9b-4905-83c0-159b01c42411 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1081.185703] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c085266-20e6-4c33-ae1a-25de8ee6d202 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.205993] env[68244]: DEBUG nova.scheduler.client.report [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.506608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "refresh_cache-a50d505f-92f2-4759-ab8f-1bf4c9708b1a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.676204] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780940, 'name': Destroy_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.714056] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.717839] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.718305] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.229s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.718632] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.721597] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.567s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.765016] env[68244]: INFO nova.scheduler.client.report [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 60c502f4-8c4b-433e-ad4f-9351048abe11 [ 1081.864704] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.864892] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.865781] env[68244]: DEBUG nova.network.neutron [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.931502] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.931502] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.026881] env[68244]: DEBUG nova.compute.manager [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1082.027757] env[68244]: DEBUG oslo_concurrency.lockutils [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.028198] env[68244]: DEBUG oslo_concurrency.lockutils [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.028509] env[68244]: DEBUG oslo_concurrency.lockutils [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.028742] env[68244]: DEBUG nova.compute.manager [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] No waiting events found dispatching network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.029025] env[68244]: WARNING nova.compute.manager [req-6aa589fd-ab4b-4aee-9d79-c4e313f12c23 req-d4322cc9-4942-4297-839d-704346ff5725 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received unexpected event network-vif-plugged-abbd3e34-9461-4503-86ee-598fe02a65d3 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1082.054994] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "477da9d1-8550-48be-b243-519b4f0ca443" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.055377] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "477da9d1-8550-48be-b243-519b4f0ca443" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.055740] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "477da9d1-8550-48be-b243-519b4f0ca443-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.059187] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "477da9d1-8550-48be-b243-519b4f0ca443-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.003s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.059386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "477da9d1-8550-48be-b243-519b4f0ca443-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.062200] env[68244]: INFO nova.compute.manager [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Terminating instance [ 1082.177194] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780940, 'name': Destroy_Task, 'duration_secs': 0.699719} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.177365] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Destroyed the VM [ 1082.177649] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1082.178028] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-be9f6543-4093-4384-9de0-de77f59383d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.189492] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1082.189492] env[68244]: value = "task-2780941" [ 1082.189492] env[68244]: _type = "Task" [ 1082.189492] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.201940] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780941, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.225833] env[68244]: DEBUG nova.compute.utils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.231700] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1082.231795] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.274746] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4b6f2794-8f81-47fa-8942-a965559d3411 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "60c502f4-8c4b-433e-ad4f-9351048abe11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.886s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.323780] env[68244]: DEBUG nova.policy [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a08e9587e104fe5bd6cc0bb5e877d3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '086fc6c89607495aa724a0c0776c78bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.436703] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1082.567946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "refresh_cache-477da9d1-8550-48be-b243-519b4f0ca443" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.568509] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquired lock "refresh_cache-477da9d1-8550-48be-b243-519b4f0ca443" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.568509] env[68244]: DEBUG nova.network.neutron [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.576486] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77188b44-0d72-44a7-a8f1-42c940234f55 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.586731] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4d94cf-09bd-442c-b081-b800b806815f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.628826] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3435c4-9255-4fa9-b5d7-59ae3d878c98 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.642819] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cfa96d-4d90-4d81-8141-418352d3eb18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.659657] env[68244]: DEBUG nova.compute.provider_tree [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.679971] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "f9f6c504-f140-4c90-994b-d3ec2d148796" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.680388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.707534] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780941, 'name': RemoveSnapshot_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.731982] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1082.750509] env[68244]: DEBUG nova.network.neutron [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.854395] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Successfully created port: f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.979412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.069595] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powering off the VM {{(pid=68244) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.069595] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2c2636a-eb42-425c-8489-9bef07eaeb12 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.082770] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1083.082770] env[68244]: value = "task-2780942" [ 1083.082770] env[68244]: _type = "Task" [ 1083.082770] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.096172] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780942, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.106159] env[68244]: DEBUG nova.network.neutron [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.163543] env[68244]: DEBUG nova.scheduler.client.report [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.169308] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.169567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.183570] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1083.198510] env[68244]: INFO nova.compute.manager [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Unrescuing [ 1083.198697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.198861] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.199071] env[68244]: DEBUG nova.network.neutron [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.205978] env[68244]: DEBUG oslo_vmware.api [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780941, 'name': RemoveSnapshot_Task, 'duration_secs': 0.709422} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.206236] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1083.206516] env[68244]: INFO nova.compute.manager [None req-4f227214-5351-4ee3-961a-c6e816cae269 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 16.19 seconds to snapshot the instance on the hypervisor. 
[ 1083.221055] env[68244]: DEBUG nova.network.neutron [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.253996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.293562] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d4ccbb63a0681af594dacab72ccb2177',container_format='bare',created_at=2025-03-06T03:26:59Z,direct_url=,disk_format='vmdk',id=2c6b03e0-6a7f-4133-a46a-ec51ea0c7799,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1716222306-shelved',owner='fd48f74a8554407593bb2c69b3191d85',properties=ImageMetaProps,protected=,size=31593472,status='active',tags=,updated_at=2025-03-06T03:27:14Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.293804] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.293956] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.299074] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.299305] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.299473] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.299750] env[68244]: DEBUG nova.virt.hardware [None 
req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.300025] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.300246] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.300422] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.300613] env[68244]: DEBUG nova.virt.hardware [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.301637] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0d8905-c4e5-4044-a565-6c4a73d9ef46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.317545] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7736a130-7679-4ac7-9e07-806917191000 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.341329] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:38:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba4f6497-e2b4-43b5-9819-6927865ae974', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abbd3e34-9461-4503-86ee-598fe02a65d3', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.349692] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.350379] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.350608] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56f91c49-497c-4275-9e59-cc9d8ea1d3b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.377559] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.377559] env[68244]: value = "task-2780943" [ 1083.377559] env[68244]: _type = "Task" [ 1083.377559] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.387451] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780943, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.594221] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780942, 'name': PowerOffVM_Task, 'duration_secs': 0.227272} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.594579] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1083.595436] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d009ed42-9ab3-4978-b227-3a812e03512e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.616307] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7757d9-3c20-4a16-82c4-2b7cd64e0663 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.650322] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.650515] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74f1bb64-bc89-481f-977c-614929e80988 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.657794] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1083.657794] env[68244]: value = "task-2780944" [ 1083.657794] env[68244]: _type = "Task" [ 1083.657794] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.665578] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.674574] env[68244]: DEBUG nova.compute.utils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.705336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.721355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Releasing lock "refresh_cache-477da9d1-8550-48be-b243-519b4f0ca443" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.721720] env[68244]: DEBUG nova.compute.manager [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1083.721914] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.722931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289e3994-222b-4da7-991d-ac492d389547 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.735349] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1083.735917] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0063c0d-a425-4887-b03d-e08f3fd6aeb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.743219] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1083.743219] env[68244]: value = "task-2780945" [ 1083.743219] env[68244]: _type = "Task" [ 1083.743219] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.752835] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.764026] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.779978] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.780598] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.780863] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.781143] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.781355] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.781580] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.781855] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 
tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.782114] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.782311] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.782478] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.782701] env[68244]: DEBUG nova.virt.hardware [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.783965] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8fa54a-8981-41e8-a1d9-b22f34bfc4bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.799326] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6b4391-e802-4a4d-84f2-15506df61e00 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.823238] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.823559] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.890017] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780943, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.962995] env[68244]: DEBUG nova.network.neutron [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.168171] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1084.168396] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.168679] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.168790] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.168972] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 
tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.169261] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4828edc9-0c17-4e20-9e74-04ac8daa8950 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.173511] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.452s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.176266] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.710s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.176497] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.178718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.132s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.178909] env[68244]: DEBUG nova.objects.instance [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1084.186095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.186594] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.186761] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.192018] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37514b94-fe2b-4fc5-a324-ca4e28cfada2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.204688] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1084.204688] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d7fac8-4d22-abed-8334-6de7ebae5637" [ 1084.204688] env[68244]: _type = "Task" [ 1084.204688] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.215574] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d7fac8-4d22-abed-8334-6de7ebae5637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.254623] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780945, 'name': PowerOffVM_Task, 'duration_secs': 0.146773} completed successfully. 
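The SearchDatastore_Task and PowerOffVM_Task exchanges above follow the usual oslo.vmware pattern: a vim call that returns a Task managed object, then a blocking poll on that task, which is what produces the repeated "Waiting for the task", "progress is N%" and "completed successfully" records. A minimal sketch of that pattern, assuming a reachable vCenter; VC_HOST, VC_USER, VC_PASS and vm_ref are placeholders and error handling is omitted:

# Minimal sketch of the invoke-then-wait pattern behind the task records above.
# VC_HOST/VC_USER/VC_PASS are placeholders; vm_ref must be a VirtualMachine
# managed object reference obtained elsewhere (e.g. via a property collector).
from oslo_vmware import api

session = api.VMwareAPISession(
    'VC_HOST', 'VC_USER', 'VC_PASS',
    api_retry_count=10,       # retry budget for transient API faults
    task_poll_interval=0.5)   # drives the periodic "progress is N%" polls

def power_off(vm_ref):
    # PowerOffVM_Task returns a Task moref immediately ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... and wait_for_task() polls it until it succeeds or raises on error.
    return session.wait_for_task(task)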
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.254953] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1084.255152] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1084.255927] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-741dcfed-b70d-4f6a-b93a-bba8da6e28e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.282398] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1084.282398] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1084.282398] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Deleting the datastore file [datastore2] 477da9d1-8550-48be-b243-519b4f0ca443 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1084.282398] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32f037cd-d588-4b5e-9d9f-d9c6e296a254 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.287657] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for the task: (returnval){ [ 1084.287657] env[68244]: value = "task-2780947" [ 1084.287657] env[68244]: _type = "Task" [ 1084.287657] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.298155] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780947, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.328574] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1084.388827] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780943, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.468403] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.468403] env[68244]: DEBUG nova.objects.instance [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'flavor' on Instance uuid cedcff81-0010-4fa6-95bf-72a4dcac5427 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.521383] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Successfully updated port: f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.591270] env[68244]: DEBUG nova.compute.manager [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1084.591486] env[68244]: DEBUG nova.compute.manager [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing instance network info cache due to event network-changed-abbd3e34-9461-4503-86ee-598fe02a65d3. 
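The Acquiring/Acquired/Releasing records for lock "refresh_cache-<instance-uuid>" around this event are oslo.concurrency named locks: every path that reads or rewrites an instance's network info cache serializes on the same lock name, so the external-event handler above cannot interleave with a concurrent build or migration. A small local sketch of the pattern; the lock name format is taken from the log, but the refresh function is illustrative, not Nova's actual helper:

# Sketch of the named-lock pattern behind the "refresh_cache-<uuid>" records.
# fetch_nw_info is a stand-in for whatever actually queries Neutron.
from oslo_concurrency import lockutils

def refresh_instance_cache(instance_uuid, fetch_nw_info):
    # All readers/updaters of this instance's cache use the same lock name,
    # so refreshes triggered by events serialize with the main build path.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_nw_info(instance_uuid)

print(refresh_instance_cache('ed5b8ba3-c8f0-468f-85d1-f36179bfef32',
                             lambda uuid: []))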
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1084.592344] env[68244]: DEBUG oslo_concurrency.lockutils [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.592344] env[68244]: DEBUG oslo_concurrency.lockutils [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.592344] env[68244]: DEBUG nova.network.neutron [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Refreshing network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.699025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c9f5648-4f48-495e-b81d-7764d38f2609 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 35.184s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.699025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 12.166s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.699187] env[68244]: INFO nova.compute.manager [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Unshelving [ 1084.719329] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d7fac8-4d22-abed-8334-6de7ebae5637, 'name': SearchDatastore_Task, 'duration_secs': 0.014019} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.720213] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac27641c-9468-4aa0-891b-632326b2b322 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.729285] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1084.729285] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5268210e-2861-087f-1c17-ad034350edbf" [ 1084.729285] env[68244]: _type = "Task" [ 1084.729285] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.740309] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5268210e-2861-087f-1c17-ad034350edbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.742200] env[68244]: INFO nova.scheduler.client.report [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocation for migration 9d3432d6-d1b5-4178-bedc-2e9fe7a3754c [ 1084.802398] env[68244]: DEBUG oslo_vmware.api [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Task: {'id': task-2780947, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097329} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.802665] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1084.802901] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1084.803040] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1084.803216] env[68244]: INFO nova.compute.manager [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1084.804029] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
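The "Waiting for function ... _deallocate_network_with_retries to return" record comes from an oslo.service looping call that keeps re-invoking the deallocation until it reports completion. The generic shape of that retry loop is sketched below; the interval, attempt count and _deallocate body are made up for illustration and are not Nova's actual values:

# Illustrative oslo.service looping-call retry loop (values are made up).
from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate():
    attempts['n'] += 1
    if attempts['n'] < 3:
        return  # pretend a transient failure; run again on the next interval
    # Raising LoopingCallDone stops the loop and becomes the wait() result.
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
# start() returns an event; wait() blocks until LoopingCallDone is raised,
# which is the "Waiting for function ... to return" seen in the log.
print(timer.start(interval=0.1).wait())   # -> 'deallocated'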
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1084.804029] env[68244]: DEBUG nova.compute.manager [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1084.804029] env[68244]: DEBUG nova.network.neutron [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1084.824058] env[68244]: DEBUG nova.network.neutron [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1084.861816] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.893234] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780943, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.943700] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "75bec02f-82f7-4e8d-81da-3c511588be29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.943838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.971875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa4d27d-1f85-44b7-ba64-51a080e5f4d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.999938] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1085.000206] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b6b0cac-4d36-4966-8d5c-2ba1ad5e4e6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.007609] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1085.007609] env[68244]: value = 
"task-2780948" [ 1085.007609] env[68244]: _type = "Task" [ 1085.007609] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.016545] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780948, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.025019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.025019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquired lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.025019] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.199459] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72e9aafe-fa53-4314-a232-9c0caad8dbc4 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.200782] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.221s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.203256] env[68244]: INFO nova.compute.claims [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.241117] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5268210e-2861-087f-1c17-ad034350edbf, 'name': SearchDatastore_Task, 'duration_secs': 0.00954} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.242045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.242318] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1085.242591] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4adeb3a-4b36-4291-958b-6490a0d3efb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.247355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-73defabd-e63a-489a-98b0-a9bca7eff131 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 19.741s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.254477] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1085.254477] env[68244]: value = "task-2780949" [ 1085.254477] env[68244]: _type = "Task" [ 1085.254477] env[68244]: } to complete. 
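Paths like "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk" (the shared image cache) and "[datastore2] <instance-uuid>/<image-id>-rescue.vmdk" (the per-instance rescue disk being copied above) are vSphere datastore paths. oslo.vmware ships a small helper for composing and parsing them; a sketch, with the identifiers copied from the log but the snippet itself only an illustration:

# Sketch: composing/parsing the "[datastore] folder/file.vmdk" paths above.
from oslo_vmware.objects.datastore import DatastorePath

image_id = '9aa0b4d1-af1b-4141-9ca6-95525b722d7e'
instance_uuid = 'a50d505f-92f2-4759-ab8f-1bf4c9708b1a'

cache_vmdk = DatastorePath('datastore2', 'devstack-image-cache_base',
                           image_id, '%s.vmdk' % image_id)
rescue_vmdk = DatastorePath('datastore2', instance_uuid,
                            '%s-rescue.vmdk' % image_id)

print(cache_vmdk)   # [datastore2] devstack-image-cache_base/<id>/<id>.vmdk
print(rescue_vmdk)  # [datastore2] <instance-uuid>/<id>-rescue.vmdk

# The same helper splits a path back into datastore name and relative part.
parsed = DatastorePath.parse(str(cache_vmdk))
print(parsed.datastore, parsed.rel_path)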
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.255178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.255717] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.258070] env[68244]: INFO nova.compute.manager [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Attaching volume 81bf204f-6899-4aff-b7a9-850f43b0444c to /dev/sdb [ 1085.280854] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.326066] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52ca94f-90de-4407-8e8b-b9d4669c8565 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.329121] env[68244]: DEBUG nova.network.neutron [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.334821] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ee9149-335d-45c2-af0b-d15a4fb91e0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.349309] env[68244]: DEBUG nova.virt.block_device [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating existing volume attachment record: cd561dc4-6ab0-44c9-8f83-bff85141a920 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1085.401140] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780943, 'name': CreateVM_Task, 'duration_secs': 1.655222} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.402128] env[68244]: DEBUG nova.network.neutron [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updated VIF entry in instance network info cache for port abbd3e34-9461-4503-86ee-598fe02a65d3. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.402479] env[68244]: DEBUG nova.network.neutron [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.404013] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1085.404407] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.404407] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.404723] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1085.405611] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a209795-b44a-4b56-87bf-2e22bb885b49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.412470] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the 
task: (returnval){ [ 1085.412470] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a3c587-90ee-08e7-abfe-bb84818165e3" [ 1085.412470] env[68244]: _type = "Task" [ 1085.412470] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.423651] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a3c587-90ee-08e7-abfe-bb84818165e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.446528] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1085.520216] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780948, 'name': PowerOffVM_Task, 'duration_secs': 0.237438} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.520216] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.525177] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfiguring VM instance instance-00000038 to detach disk 2002 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1085.525531] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc4b2bdd-15ea-47c6-8ff3-d7f886dbc850 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.550087] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1085.550087] env[68244]: value = "task-2780952" [ 1085.550087] env[68244]: _type = "Task" [ 1085.550087] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.560054] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780952, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.582383] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.714293] env[68244]: DEBUG nova.compute.utils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1085.747271] env[68244]: DEBUG nova.network.neutron [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updating instance_info_cache with network_info: [{"id": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "address": "fa:16:3e:13:c3:81", "network": {"id": "8566040b-a0d0-45fa-a575-25bf080336db", "bridge": "br-int", "label": "tempest-ServersTestJSON-908114910-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "086fc6c89607495aa724a0c0776c78bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72f005b-3f", "ovs_interfaceid": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.772100] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780949, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.834263] env[68244]: INFO nova.compute.manager [-] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Took 1.03 seconds to deallocate network for instance. 
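The network_info blobs dumped in these cache updates are, once deserialized, plain nested dicts: one entry per VIF carrying the Neutron port id, MAC address, subnets with fixed and floating IPs, and the OVS/NSX binding details. A self-contained sketch that pulls out the usually interesting fields; the literal values are abbreviated from the fc75039c entry above:

# Sketch: extracting common fields from one network_info VIF entry.
# The dict literal is trimmed from the cache-update records above.
vif = {
    "id": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf",
    "address": "fa:16:3e:13:c3:81",
    "type": "ovs",
    "devname": "tapf72f005b-3f",
    "details": {"segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}},
    "network": {
        "label": "tempest-ServersTestJSON-908114910-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.10", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
floating_ips = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]

print(vif["id"], vif["devname"], fixed_ips, floating_ips)
# -> f72f005b-... tapf72f005b-3f ['192.168.128.10'] []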
[ 1085.893789] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.894390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.894740] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.895390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.895390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.898585] env[68244]: INFO nova.compute.manager [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Terminating instance [ 1085.906897] env[68244]: DEBUG oslo_concurrency.lockutils [req-edc413be-7e59-4eca-937b-8e164491409e req-da75a9d6-1bdf-4dbb-9b2d-b8447d44dd2f service nova] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.925578] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.925960] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] 
[instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Processing image 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.926359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.926556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.926758] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.927946] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfd0cd2c-d9b9-4f22-88a8-2fda0074f0c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.939608] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.939608] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1085.940629] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7623b357-6a7c-492c-a879-3c4043e488df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.947356] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1085.947356] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52695641-096a-7d80-b26c-e81fc4272d67" [ 1085.947356] env[68244]: _type = "Task" [ 1085.947356] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.959789] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52695641-096a-7d80-b26c-e81fc4272d67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.969211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.060330] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780952, 'name': ReconfigVM_Task, 'duration_secs': 0.42241} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.060651] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfigured VM instance instance-00000038 to detach disk 2002 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1086.060950] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.061192] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b212f7b-80e2-4eea-b2d2-bbf96c9e205b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.068030] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1086.068030] env[68244]: value = "task-2780954" [ 1086.068030] env[68244]: _type = "Task" [ 1086.068030] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.076326] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780954, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.218561] env[68244]: INFO nova.virt.block_device [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Booting with volume c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb at /dev/sdb [ 1086.250318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Releasing lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.250595] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Instance network_info: |[{"id": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "address": "fa:16:3e:13:c3:81", "network": {"id": "8566040b-a0d0-45fa-a575-25bf080336db", "bridge": "br-int", "label": "tempest-ServersTestJSON-908114910-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "086fc6c89607495aa724a0c0776c78bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72f005b-3f", "ovs_interfaceid": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.251029] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c3:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f72f005b-3ff1-4910-9fdc-4d4b32362aaf', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.259101] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Creating folder: Project (086fc6c89607495aa724a0c0776c78bc). Parent ref: group-v558876. 
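The "Instance VIF info" record above shows the condensed per-port dict built before the VM is created: the NSX logical switch id becomes an OpaqueNetwork reference and the adapter model is vmxnet3. A sketch of that mapping; to_vif_info is a hypothetical helper name, the output keys mirror the log record, and the input uses the same network_info entry shape shown earlier:

# Sketch: condensing a network_info VIF entry into the vif_info dict shape
# logged above. to_vif_info is a hypothetical name, not a Nova function.
def to_vif_info(vif, vif_model='vmxnet3'):
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],           # e.g. 'br-int'
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }

example_vif = {
    'id': 'f72f005b-3ff1-4910-9fdc-4d4b32362aaf',
    'address': 'fa:16:3e:13:c3:81',
    'network': {'bridge': 'br-int'},
    'details': {'nsx-logical-switch-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d'},
}
print(to_vif_info(example_vif))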
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.262488] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9074c746-0e54-441a-80ed-0666fd661fb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.264461] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e283f4a-6460-47ab-b2a5-c21d1e463ac3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.275610] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576435} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.278912] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. [ 1086.280889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532335b9-8515-4edc-aad7-1f5112273553 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.285779] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d467b6-399f-466e-a15f-7175e02ad2d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.297414] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Created folder: Project (086fc6c89607495aa724a0c0776c78bc) in parent group-v558876. [ 1086.297598] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Creating folder: Instances. Parent ref: group-v559118. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1086.300033] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85d38999-2993-4bc9-983d-5a1535b80f87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.324535] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.338827] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d61dc2a-4b92-41b4-869b-fa99df580f24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.352232] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Created folder: Instances in parent group-v559118. [ 1086.352513] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.353457] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.354242] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.354449] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1c5e38a-894d-4bad-a432-7ee9808b1849 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.356678] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-707cccf3-2265-47b6-9bf3-8641fff10348 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.376769] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1086.376769] env[68244]: value = "task-2780957" [ 1086.376769] env[68244]: _type = "Task" [ 1086.376769] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.382947] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.382947] env[68244]: value = "task-2780958" [ 1086.382947] env[68244]: _type = "Task" [ 1086.382947] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.386452] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db242cc-a886-4307-bd7c-764195393f7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.407191] env[68244]: DEBUG nova.compute.manager [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1086.407428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.407814] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780957, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.409936] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e043877-498e-4893-a7dc-d92a9f2113ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.417715] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780958, 'name': CreateVM_Task} progress is 15%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.422253] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.433896] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab1fb247-fbbd-41ba-8aee-70300e09ae78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.437559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa21bf2-1133-423d-bf17-9deb74e0fcd1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.445567] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8454d16-7b9f-4b69-a0db-377127f4e135 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.448765] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1086.448765] env[68244]: value = "task-2780959" [ 1086.448765] env[68244]: _type = "Task" [ 1086.448765] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.470466] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "4eb691f4-567e-412c-ba04-792ee9a21135" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.470768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.472503] env[68244]: DEBUG nova.virt.block_device [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating existing volume attachment record: a5a1b442-e6dc-4561-8ac5-b0af986aaf44 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1086.482986] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780959, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.486656] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1086.486656] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Fetch image to [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760/OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1086.486656] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Downloading stream optimized image 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 to [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760/OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760.vmdk on the data store datastore2 as vApp {{(pid=68244) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1086.486656] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Downloading image file data 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 to the ESX as VM named 'OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760' {{(pid=68244) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1086.576093] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1086.576093] env[68244]: value = "resgroup-9" [ 1086.576093] env[68244]: _type = "ResourcePool" [ 1086.576093] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1086.576093] env[68244]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b1e7c02d-b6d2-4277-aa4a-54dec25b619d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.601495] env[68244]: DEBUG oslo_vmware.api [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2780954, 'name': PowerOnVM_Task, 'duration_secs': 0.46882} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.601495] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.601495] env[68244]: DEBUG nova.compute.manager [None req-ebb5757f-2f8d-4c4a-8b1c-4271b251951e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.601495] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lease: (returnval){ [ 1086.601495] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1086.601495] env[68244]: _type = "HttpNfcLease" [ 1086.601495] env[68244]: } obtained for vApp import into resource pool (val){ [ 1086.601495] env[68244]: value = "resgroup-9" [ 1086.601495] env[68244]: _type = "ResourcePool" [ 1086.601495] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1086.601843] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the lease: (returnval){ [ 1086.601843] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1086.601843] env[68244]: _type = "HttpNfcLease" [ 1086.601843] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1086.605631] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb435032-b495-4214-bc8f-a2dc39ea8649 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.619841] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1086.619841] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1086.619841] env[68244]: _type = "HttpNfcLease" [ 1086.619841] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1086.628179] env[68244]: DEBUG nova.compute.manager [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Received event network-vif-plugged-f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1086.628263] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Acquiring lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.628543] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.628798] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.629126] env[68244]: DEBUG nova.compute.manager [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] No waiting events found dispatching network-vif-plugged-f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.629238] env[68244]: WARNING nova.compute.manager [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Received unexpected event network-vif-plugged-f72f005b-3ff1-4910-9fdc-4d4b32362aaf for instance with vm_state building and task_state spawning. [ 1086.629433] env[68244]: DEBUG nova.compute.manager [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Received event network-changed-f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1086.629613] env[68244]: DEBUG nova.compute.manager [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Refreshing instance network info cache due to event network-changed-f72f005b-3ff1-4910-9fdc-4d4b32362aaf. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1086.629827] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Acquiring lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.629966] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Acquired lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.630686] env[68244]: DEBUG nova.network.neutron [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Refreshing network info cache for port f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.673596] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671fd2e8-b07d-4676-b020-44ae80850200 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.684275] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3672285b-f8d3-4483-9945-5b3f656f3ef2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.724017] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d8f263-e1f6-4932-8630-a3abeacd7568 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.729900] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d06483-1b18-478b-8f8c-5b4315062099 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.746041] env[68244]: DEBUG nova.compute.provider_tree [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.887399] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780957, 'name': ReconfigVM_Task, 'duration_secs': 0.415193} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.887739] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Reconfigured VM instance instance-00000056 to attach disk [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.888704] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8778b863-2ea2-4601-9199-02e33ecf7590 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.919063] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-560ba88c-1954-40a1-a87b-3ff11391903b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.934608] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780958, 'name': CreateVM_Task, 'duration_secs': 0.405368} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.935822] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1086.936183] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1086.936183] env[68244]: value = "task-2780962" [ 1086.936183] env[68244]: _type = "Task" [ 1086.936183] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.936811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.936974] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.937297] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1086.937673] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f016de4-6556-4851-9027-e9521195a127 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.945866] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1086.945866] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525221ea-6169-d93d-0d7c-a6d0da70fd42" [ 1086.945866] env[68244]: _type = "Task" [ 1086.945866] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.949190] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780962, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.964010] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780959, 'name': PowerOffVM_Task, 'duration_secs': 0.223703} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.967073] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.967269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.967538] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525221ea-6169-d93d-0d7c-a6d0da70fd42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.968136] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20bc9525-be03-4b4d-834f-85ea22d5a353 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.979954] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1087.037199] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.037454] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.037705] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore2] 9658b4e0-f4f9-4628-b700-19d94800961c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.038229] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afe4aa9a-44ab-4b8f-b1fd-ab8a47964d1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.051281] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1087.051281] env[68244]: value = "task-2780966" [ 1087.051281] env[68244]: _type = "Task" [ 1087.051281] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.059541] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.112305] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1087.112305] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1087.112305] env[68244]: _type = "HttpNfcLease" [ 1087.112305] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1087.249451] env[68244]: DEBUG nova.scheduler.client.report [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.256562] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.256855] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.257100] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.257374] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.257569] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.261476] env[68244]: INFO nova.compute.manager [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Terminating instance [ 1087.454415] env[68244]: DEBUG oslo_vmware.api [None 
req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780962, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.472293] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525221ea-6169-d93d-0d7c-a6d0da70fd42, 'name': SearchDatastore_Task, 'duration_secs': 0.020073} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.475017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.475017] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1087.475017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.475017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.475017] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.475017] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-292d9148-4928-4deb-a4dd-7b2294fb95b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.476351] env[68244]: DEBUG nova.network.neutron [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updated VIF entry in instance network info cache for port f72f005b-3ff1-4910-9fdc-4d4b32362aaf. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.476668] env[68244]: DEBUG nova.network.neutron [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updating instance_info_cache with network_info: [{"id": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "address": "fa:16:3e:13:c3:81", "network": {"id": "8566040b-a0d0-45fa-a575-25bf080336db", "bridge": "br-int", "label": "tempest-ServersTestJSON-908114910-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "086fc6c89607495aa724a0c0776c78bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72f005b-3f", "ovs_interfaceid": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.484724] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.484864] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1087.487626] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d9b2abc-7e61-4ebb-9228-427fae483998 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.495955] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1087.495955] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52635e0a-b0be-90d5-6b85-d37949279d4a" [ 1087.495955] env[68244]: _type = "Task" [ 1087.495955] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.504442] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52635e0a-b0be-90d5-6b85-d37949279d4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.508807] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.559492] env[68244]: DEBUG oslo_vmware.api [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2780966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175251} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.559804] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.560061] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.560267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.560685] env[68244]: INFO nova.compute.manager [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1087.560943] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.561196] env[68244]: DEBUG nova.compute.manager [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1087.561295] env[68244]: DEBUG nova.network.neutron [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.615879] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1087.615879] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1087.615879] env[68244]: _type = "HttpNfcLease" [ 1087.615879] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1087.754426] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.755010] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1087.757932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.053s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.761379] env[68244]: INFO nova.compute.claims [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.768229] env[68244]: DEBUG nova.compute.manager [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1087.768229] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1087.768834] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078cd46d-eba7-4ec2-9747-0c5add61dddc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.780022] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.780022] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c3df328-ce0c-4edf-8a3d-9fde4ab71f06 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.783294] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1087.783294] env[68244]: value = "task-2780968" [ 1087.783294] env[68244]: _type = "Task" [ 1087.783294] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.793913] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.948715] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780962, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.979521] env[68244]: DEBUG oslo_concurrency.lockutils [req-32dc7900-288e-4a6b-b2bc-c494c06700a7 req-0926854a-2e72-4690-9099-a4ec6f1ba17e service nova] Releasing lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.006394] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52635e0a-b0be-90d5-6b85-d37949279d4a, 'name': SearchDatastore_Task, 'duration_secs': 0.015779} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.007304] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-335f259c-5d2e-4294-8f1b-ff68a5d5b9d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.012728] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1088.012728] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce44bd-0024-536c-e974-686a8896f0e2" [ 1088.012728] env[68244]: _type = "Task" [ 1088.012728] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.021937] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce44bd-0024-536c-e974-686a8896f0e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.114601] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1088.114601] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1088.114601] env[68244]: _type = "HttpNfcLease" [ 1088.114601] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1088.114912] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1088.114912] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52182b74-9fca-8b34-e182-28ce5ca623b6" [ 1088.114912] env[68244]: _type = "HttpNfcLease" [ 1088.114912] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1088.115671] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdcacf2-7aa1-49ba-8170-04d0d9c402f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.124526] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1088.124783] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating HTTP connection to write to file with size = 31593472 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk. 
{{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1088.194032] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5a030011-6183-43f0-9280-9affea783fe7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.266365] env[68244]: DEBUG nova.compute.utils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1088.270117] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1088.270264] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1088.294901] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780968, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.305438] env[68244]: DEBUG nova.network.neutron [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.323652] env[68244]: DEBUG nova.policy [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1088.449521] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780962, 'name': ReconfigVM_Task, 'duration_secs': 1.226081} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.449840] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1088.450174] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33167a49-8794-4ef2-abf5-cacb94223485 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.457497] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1088.457497] env[68244]: value = "task-2780969" [ 1088.457497] env[68244]: _type = "Task" [ 1088.457497] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.466644] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780969, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.523293] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ce44bd-0024-536c-e974-686a8896f0e2, 'name': SearchDatastore_Task, 'duration_secs': 0.0508} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.523607] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.523874] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fc75039c-f2d0-4d4b-9a82-b605b6ba63d5/fc75039c-f2d0-4d4b-9a82-b605b6ba63d5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1088.524165] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da1e5053-1af7-489c-a30c-ee976548172a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.533482] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1088.533482] env[68244]: value = "task-2780970" [ 1088.533482] env[68244]: _type = "Task" [ 1088.533482] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.539032] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.716851] env[68244]: DEBUG nova.compute.manager [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1088.716851] env[68244]: DEBUG nova.compute.manager [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing instance network info cache due to event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1088.716851] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.716851] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.716851] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.771206] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1088.797826] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780968, 'name': PowerOffVM_Task, 'duration_secs': 0.536908} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.799577] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1088.799784] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1088.800079] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-873662e4-e91c-4136-bd9d-4cf06a385d0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.808303] env[68244]: INFO nova.compute.manager [-] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Took 1.25 seconds to deallocate network for instance. 
[ 1088.882734] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Successfully created port: e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.971883] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780969, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.046800] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780970, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.118102] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7040bc-9f76-4550-a7fe-5bdb2e46844e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.128687] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9b6dad-cd13-483b-a9e9-50f2560f88bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.176936] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bd8f36-1e27-4057-b7b5-5a615b906e94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.194027] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200ebe0d-2c72-440b-93da-1256945159c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.209163] env[68244]: DEBUG nova.compute.provider_tree [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.292545] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.292703] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.292903] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 
tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleting the datastore file [datastore2] f2e57bf9-05ee-49d8-846d-c3bf5920ae96 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.296142] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b86a9684-ba23-462c-ba9c-af2ced709638 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.307626] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1089.307626] env[68244]: value = "task-2780973" [ 1089.307626] env[68244]: _type = "Task" [ 1089.307626] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.315113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.319241] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.471784] env[68244]: DEBUG oslo_vmware.api [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2780969, 'name': PowerOnVM_Task, 'duration_secs': 0.830143} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.472224] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1089.478025] env[68244]: DEBUG nova.compute.manager [None req-7fb568aa-a4d6-4827-a7ca-70e08e23bc95 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.481448] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d5ec4f-0fe2-48d6-9722-e38ddc6b47d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.546103] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704062} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.546103] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] fc75039c-f2d0-4d4b-9a82-b605b6ba63d5/fc75039c-f2d0-4d4b-9a82-b605b6ba63d5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1089.546103] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1089.546103] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38f9b87a-891c-4e3e-af53-3d75c62841ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.552288] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1089.552288] env[68244]: value = "task-2780974" [ 1089.552288] env[68244]: _type = "Task" [ 1089.552288] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.564042] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.600791] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updated VIF entry in instance network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.601170] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.712503] env[68244]: DEBUG nova.scheduler.client.report [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.789642] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1089.817176] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.817508] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.817801] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.817938] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.818388] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.818649] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.818969] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.819257] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.819550] 
env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.819838] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.820150] env[68244]: DEBUG nova.virt.hardware [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.821403] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c52842e-c1a4-4925-b086-f12e23f061d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.835327] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1089.835609] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1089.836024] env[68244]: DEBUG oslo_vmware.api [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2780973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25441} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.837172] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04947bfe-ccc0-46f1-81ff-aa83fcb77a0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.840385] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.840796] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.840796] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.840973] env[68244]: INFO nova.compute.manager [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Took 2.07 seconds to destroy the instance on the hypervisor. [ 1089.841201] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.841403] env[68244]: DEBUG nova.compute.manager [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.841560] env[68244]: DEBUG nova.network.neutron [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1089.850501] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa45ed7-de89-4547-8630-7bd437cf8c11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.855591] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk is in state: ready. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1089.855591] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1089.855591] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-79740847-5ff6-473d-9372-1dbce0dc4432 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.902402] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1089.903130] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559117', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'name': 'volume-81bf204f-6899-4aff-b7a9-850f43b0444c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ba4f3f5-726e-482f-a821-d2ee1bbd4c33', 'attached_at': '', 'detached_at': '', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'serial': '81bf204f-6899-4aff-b7a9-850f43b0444c'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1089.903971] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bf87bc-0f64-4de0-ac7e-b5d2960b53a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.924125] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7788687-3f42-4c6e-9fb9-7c518d010825 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.953268] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-81bf204f-6899-4aff-b7a9-850f43b0444c/volume-81bf204f-6899-4aff-b7a9-850f43b0444c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.954027] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bee7678c-654f-40de-bd75-be07fa26b9a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.972729] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 
tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1089.972729] env[68244]: value = "task-2780975" [ 1089.972729] env[68244]: _type = "Task" [ 1089.972729] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.982114] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780975, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.063130] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075178} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.064030] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.064499] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb954d6-390e-4139-b9cc-62e90856b719 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.093324] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] fc75039c-f2d0-4d4b-9a82-b605b6ba63d5/fc75039c-f2d0-4d4b-9a82-b605b6ba63d5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.095292] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a503399-524e-4f17-b7b8-226c653b960f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.110878] env[68244]: DEBUG nova.network.neutron [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.112151] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.112376] env[68244]: DEBUG nova.compute.manager [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.112539] env[68244]: DEBUG nova.compute.manager 
[req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing instance network info cache due to event network-changed-7cc08f9f-ecf6-45df-a147-29489ed20ade. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1090.113715] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Acquiring lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.113715] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Acquired lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.113715] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Refreshing network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.119427] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1090.119427] env[68244]: value = "task-2780976" [ 1090.119427] env[68244]: _type = "Task" [ 1090.119427] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.128991] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780976, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.218907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.219765] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.224406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.363s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.227089] env[68244]: INFO nova.compute.claims [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.451403] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Successfully updated port: e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.464108] env[68244]: DEBUG oslo_vmware.rw_handles [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb0d3e-77bb-23f6-60a9-768382a6c55c/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1090.464318] env[68244]: INFO nova.virt.vmwareapi.images [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Downloaded image file data 2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 [ 1090.465183] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820437d-0a4c-48b2-87ab-94dea009736d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.489576] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4739fc85-c3b6-445f-a066-4e9c71308cb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.498438] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780975, 'name': ReconfigVM_Task, 'duration_secs': 0.393069} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.499070] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-81bf204f-6899-4aff-b7a9-850f43b0444c/volume-81bf204f-6899-4aff-b7a9-850f43b0444c.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.505017] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-112e147f-b595-4d3e-aaff-2a8e21ad4204 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.520545] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1090.520545] env[68244]: value = "task-2780978" [ 1090.520545] env[68244]: _type = "Task" [ 1090.520545] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.529766] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780978, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.533010] env[68244]: INFO nova.virt.vmwareapi.images [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] The imported VM was unregistered [ 1090.535299] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1090.535523] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Creating directory with path [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.535771] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d05911db-89f5-4b0b-b6d4-666aad56ddb5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.554312] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Created directory with path [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799 {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.554507] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c 
tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760/OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760.vmdk to [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk. {{(pid=68244) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1090.554714] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-70bb2619-d71e-4130-8cde-a097a2ae504f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.561337] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1090.561337] env[68244]: value = "task-2780979" [ 1090.561337] env[68244]: _type = "Task" [ 1090.561337] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.568850] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.616050] env[68244]: INFO nova.compute.manager [-] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Took 0.77 seconds to deallocate network for instance. [ 1090.632763] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780976, 'name': ReconfigVM_Task, 'duration_secs': 0.407592} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.633114] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Reconfigured VM instance instance-00000057 to attach disk [datastore2] fc75039c-f2d0-4d4b-9a82-b605b6ba63d5/fc75039c-f2d0-4d4b-9a82-b605b6ba63d5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.633783] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8b3e616-57b0-4122-b3dc-751b0afcb76c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.640817] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1090.640817] env[68244]: value = "task-2780980" [ 1090.640817] env[68244]: _type = "Task" [ 1090.640817] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.650296] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780980, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.731668] env[68244]: DEBUG nova.compute.utils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.735890] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.739756] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.751582] env[68244]: DEBUG nova.compute.manager [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Received event network-vif-deleted-8cb9c661-5875-4af2-9420-68539b4270e7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.751803] env[68244]: DEBUG nova.compute.manager [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-vif-plugged-e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.752038] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.752291] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.752488] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.752698] env[68244]: DEBUG nova.compute.manager 
[req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] No waiting events found dispatching network-vif-plugged-e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.752830] env[68244]: WARNING nova.compute.manager [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received unexpected event network-vif-plugged-e2daf964-3dca-4df6-b310-952aab3796a9 for instance with vm_state building and task_state spawning. [ 1090.753319] env[68244]: DEBUG nova.compute.manager [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-changed-e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.753319] env[68244]: DEBUG nova.compute.manager [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing instance network info cache due to event network-changed-e2daf964-3dca-4df6-b310-952aab3796a9. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1090.753401] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.753509] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.753683] env[68244]: DEBUG nova.network.neutron [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing network info cache for port e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.815927] env[68244]: DEBUG nova.policy [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a845273b2fd426ab3eababe8bc49ae8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b89e410836c14f57aabd078a83e66276', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.826651] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.826953] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.936968] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updated VIF entry in instance network info cache for port 7cc08f9f-ecf6-45df-a147-29489ed20ade. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.937418] env[68244]: DEBUG nova.network.neutron [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [{"id": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "address": "fa:16:3e:ba:45:74", "network": {"id": "a85c9923-eb79-431b-b6d8-577d9242be09", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2053122981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c0473d09c04fb8a80d27a43c07bef4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cc08f9f-ec", "ovs_interfaceid": "7cc08f9f-ecf6-45df-a147-29489ed20ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.955455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.033607] env[68244]: DEBUG oslo_vmware.api [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2780978, 'name': ReconfigVM_Task, 'duration_secs': 0.150379} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.034533] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559117', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'name': 'volume-81bf204f-6899-4aff-b7a9-850f43b0444c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ba4f3f5-726e-482f-a821-d2ee1bbd4c33', 'attached_at': '', 'detached_at': '', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'serial': '81bf204f-6899-4aff-b7a9-850f43b0444c'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1091.073746] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.129591] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.154968] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780980, 'name': Rename_Task, 'duration_secs': 0.198896} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.155437] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.155572] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63a31ee7-71b9-46b9-a243-813b216af186 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.164993] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1091.164993] env[68244]: value = "task-2780981" [ 1091.164993] env[68244]: _type = "Task" [ 1091.164993] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.174294] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780981, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.182442] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Successfully created port: f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.237162] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.306022] env[68244]: DEBUG nova.network.neutron [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.331331] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1091.441694] env[68244]: DEBUG oslo_concurrency.lockutils [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] Releasing lock "refresh_cache-cedcff81-0010-4fa6-95bf-72a4dcac5427" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.441961] env[68244]: DEBUG nova.compute.manager [req-798f0b6e-6a88-48b2-8ef8-33daf45c1950 req-2ff5d20d-fc88-4033-b4e2-6a84b7e38cc0 service nova] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Received event network-vif-deleted-c5c56d0b-9541-4af9-9b67-3e468da9557f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1091.447862] env[68244]: DEBUG nova.network.neutron [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.575207] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 29%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.635296] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71203b63-a300-42d8-a0ba-ebaf5c91cbfe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.649171] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549effaf-ecf0-476b-aafe-7b136f045eb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.687113] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b909862-3fc3-4bce-9696-bf9cb3bdffed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.697818] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780981, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.701596] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7056e76a-2d2e-4f9a-8552-3354d58ab01d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.719760] env[68244]: DEBUG nova.compute.provider_tree [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.852335] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.951133] env[68244]: DEBUG oslo_concurrency.lockutils [req-7df04e77-f23c-4824-912e-1a13f2d9c7eb req-2ca9edae-74e1-4717-8ff4-37e2a7a99f08 service nova] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.951626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.951822] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1092.079606] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 
tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.091248] env[68244]: DEBUG nova.objects.instance [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.201190] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780981, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.223736] env[68244]: DEBUG nova.scheduler.client.report [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1092.249101] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.283849] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.284329] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.284503] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.284689] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.285170] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.285170] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.285360] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.285628] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.285833] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.286008] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.286201] env[68244]: DEBUG nova.virt.hardware [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.287664] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dc4e24-2abf-42f7-bea4-f5707ce34cda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.301191] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483041c8-1c3d-4258-b53a-cdca7e9f25f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.497966] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1092.578877] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.597332] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da8c7977-b7f2-4deb-96a2-0ff431de3664 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.341s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.611638] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.698765] env[68244]: DEBUG oslo_vmware.api [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2780981, 'name': PowerOnVM_Task, 'duration_secs': 1.115146} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.699040] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.699288] env[68244]: INFO nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Took 8.95 seconds to spawn the instance on the hypervisor. [ 1092.699471] env[68244]: DEBUG nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.700380] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400dac37-5b3d-4836-9770-cf04758305a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.729532] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.730181] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1092.733020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.764s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.734674] env[68244]: INFO nova.compute.claims [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.787810] env[68244]: DEBUG nova.network.neutron [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.890462] env[68244]: DEBUG nova.compute.manager [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Received event network-vif-plugged-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.890679] env[68244]: DEBUG oslo_concurrency.lockutils [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] Acquiring lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.891016] env[68244]: DEBUG oslo_concurrency.lockutils [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.891348] env[68244]: DEBUG oslo_concurrency.lockutils [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.891607] env[68244]: DEBUG nova.compute.manager [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] No waiting events found dispatching network-vif-plugged-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.891728] env[68244]: WARNING nova.compute.manager [req-106c25cb-cc8e-47a1-a65d-ebb37a918c28 req-5526ed7b-ba5d-41ab-a5e0-fa63cf73134c service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Received unexpected event network-vif-plugged-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 for instance with vm_state building and task_state spawning. [ 1093.066220] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Successfully updated port: f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.082466] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.227823] env[68244]: INFO nova.compute.manager [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Took 27.93 seconds to build instance. [ 1093.242540] env[68244]: DEBUG nova.compute.utils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1093.244038] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1093.244313] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1093.292742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.293258] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance network_info: |[{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1093.293835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:5f:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2daf964-3dca-4df6-b310-952aab3796a9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.302077] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating folder: Project (210583c7f70d4a77937bb82ce46d752c). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.302702] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25ac8b3e-dd77-44b9-9549-8bbb41cb753c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.317215] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created folder: Project (210583c7f70d4a77937bb82ce46d752c) in parent group-v558876. [ 1093.317483] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating folder: Instances. Parent ref: group-v559124. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.317693] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e185870e-1b04-4ae6-b362-2377359eb3e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.330510] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created folder: Instances in parent group-v559124. [ 1093.330776] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1093.330982] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1093.331221] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb59b5a8-e4e2-4dfd-8375-f0a0fafc5fed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.353858] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.353858] env[68244]: value = "task-2780984" [ 1093.353858] env[68244]: _type = "Task" [ 1093.353858] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.362546] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780984, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.390054] env[68244]: DEBUG nova.policy [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1093.573997] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.573997] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquired lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.573997] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.583723] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780979, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.81229} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.584671] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760/OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760.vmdk to [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk. 
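The records around this point all follow the same oslo.vmware pattern: a vSphere "*_Task" method (MoveVirtualDisk_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, CreateVM_Task, PowerOnVM_Task) is invoked through the API session, and wait_for_task polls it until vCenter reports completion, which is what produces the repeated "progress is N%" and "completed successfully" entries. A minimal, self-contained sketch of that invoke-and-wait pattern follows; the vCenter address, credentials, retry/poll settings, and the VM moref value are hypothetical placeholders, not values taken from this log.

    # Sketch only: host, credentials, retry count, poll interval, and the
    # moref value below are hypothetical placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Positional args: host, username, password, api_retry_count, task_poll_interval.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret', 10, 0.5)

    # In Nova the managed object reference comes from a PropertyCollector
    # query; here it is built directly from a hypothetical moref value.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off the server-side task, then block while oslo.vmware polls it;
    # each poll corresponds to one "progress is N%" record in this log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    session.logout()

If the task fails on the vCenter side, wait_for_task raises instead of returning, so callers like the ones logged here only reach the "completed successfully" message on the success path.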
[ 1093.584874] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Cleaning up location [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1093.585122] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7e34be7d-4e65-434d-8f17-87529f122760 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.585415] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1efc3d97-5872-411c-84ca-4a7c429b6f8b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.593121] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1093.593121] env[68244]: value = "task-2780985" [ 1093.593121] env[68244]: _type = "Task" [ 1093.593121] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.603301] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.667575] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Successfully created port: 6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.730260] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5625aebf-358d-4843-a571-688bdc257240 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.444s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.749243] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1093.879555] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780984, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.080090] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3197696f-076f-43ef-a743-bc002f3ffc07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.088736] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a0980a-9d2e-41f5-b6c8-adbbb1565577 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.122055] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.124407] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e524838-f2dc-4f05-94fb-57d68615f18e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.131798] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187075} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.132390] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.132555] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.132799] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk to [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.133120] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef3ca7ad-75e7-425f-98f9-56ed8d905714 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.137973] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711dc0a3-8119-47b7-8a46-0f3bbfb46dbc {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.142811] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1094.142811] env[68244]: value = "task-2780986" [ 1094.142811] env[68244]: _type = "Task" [ 1094.142811] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.156982] env[68244]: DEBUG nova.compute.provider_tree [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.164019] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.276719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.278111] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.304430] env[68244]: DEBUG nova.network.neutron [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Updating instance_info_cache with network_info: [{"id": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "address": "fa:16:3e:6d:a4:82", "network": {"id": "9505076a-4913-45d5-a7bd-0996dd85b561", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1525363977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89e410836c14f57aabd078a83e66276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd89ae-e4", 
"ovs_interfaceid": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.363754] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780984, 'name': CreateVM_Task, 'duration_secs': 0.830851} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.364053] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1094.364663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.364871] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.365238] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1094.365487] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ed2242-56cd-4f5a-89d9-d0d0af7b5903 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.370177] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1094.370177] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255e3fa-a2a0-fe90-ed35-695df85b7aa3" [ 1094.370177] env[68244]: _type = "Task" [ 1094.370177] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.377860] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255e3fa-a2a0-fe90-ed35-695df85b7aa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.610841] env[68244]: DEBUG nova.compute.manager [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Received event network-changed-f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1094.611085] env[68244]: DEBUG nova.compute.manager [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Refreshing instance network info cache due to event network-changed-f72f005b-3ff1-4910-9fdc-4d4b32362aaf. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1094.611321] env[68244]: DEBUG oslo_concurrency.lockutils [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] Acquiring lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.611501] env[68244]: DEBUG oslo_concurrency.lockutils [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] Acquired lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.611680] env[68244]: DEBUG nova.network.neutron [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Refreshing network info cache for port f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.652918] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.660269] env[68244]: DEBUG nova.scheduler.client.report [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.760903] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1094.782317] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1094.791592] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1094.791835] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 
tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1094.795021] env[68244]: DEBUG nova.virt.hardware [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1094.795021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca5636d-4909-4d96-9a6d-968397daa327 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.805101] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20c4e4e-6dc0-4b11-9b8a-bcfe5816e9b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.810033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Releasing lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.810350] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Instance network_info: |[{"id": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "address": "fa:16:3e:6d:a4:82", "network": {"id": "9505076a-4913-45d5-a7bd-0996dd85b561", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1525363977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89e410836c14f57aabd078a83e66276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd89ae-e4", "ovs_interfaceid": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1094.810827] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:a4:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bff6c3a1-cc80-46ca-86c0-6dbb029edddb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7fd89ae-e48f-4a24-baad-9b7ce30dfea2', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.818721] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Creating folder: Project (b89e410836c14f57aabd078a83e66276). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1094.819515] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80df7c21-321f-4d17-bd73-1b853ad0041b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.841958] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Created folder: Project (b89e410836c14f57aabd078a83e66276) in parent group-v558876. [ 1094.842192] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Creating folder: Instances. Parent ref: group-v559127. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1094.842440] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5005f4b0-b098-41d3-982e-3433bf04daeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.851401] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Created folder: Instances in parent group-v559127. [ 1094.851628] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.851827] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.852055] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d0abdcd-f720-4a25-8c6d-57a949181153 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.876114] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.876114] env[68244]: value = "task-2780989" [ 1094.876114] env[68244]: _type = "Task" [ 1094.876114] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.883804] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5255e3fa-a2a0-fe90-ed35-695df85b7aa3, 'name': SearchDatastore_Task, 'duration_secs': 0.047732} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.884556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.884783] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.885043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.885203] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.885380] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1094.885646] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-0aed77ee-b13c-4837-b55e-ab42d1097d1e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.890537] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780989, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.903471] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1094.903681] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1094.904449] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6597610-402b-435d-a217-abda0dcab8d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.911378] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1094.911378] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f75d9b-159a-8176-154f-fc8473937b32" [ 1094.911378] env[68244]: _type = "Task" [ 1094.911378] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.920050] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f75d9b-159a-8176-154f-fc8473937b32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.922486] env[68244]: DEBUG nova.compute.manager [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Received event network-changed-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1094.922705] env[68244]: DEBUG nova.compute.manager [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Refreshing instance network info cache due to event network-changed-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1094.923450] env[68244]: DEBUG oslo_concurrency.lockutils [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] Acquiring lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.923450] env[68244]: DEBUG oslo_concurrency.lockutils [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] Acquired lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.923450] env[68244]: DEBUG nova.network.neutron [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Refreshing network info cache for port f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.156798] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.166083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.166723] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1095.170070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.817s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.170414] env[68244]: DEBUG nova.objects.instance [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lazy-loading 'resources' on Instance uuid 477da9d1-8550-48be-b243-519b4f0ca443 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.245656] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Successfully updated port: 6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.309946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.389598] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780989, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.424471] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f75d9b-159a-8176-154f-fc8473937b32, 'name': SearchDatastore_Task, 'duration_secs': 0.064496} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.427704] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00516a02-3a60-4083-94db-368baddc29a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.430943] env[68244]: DEBUG nova.network.neutron [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updated VIF entry in instance network info cache for port f72f005b-3ff1-4910-9fdc-4d4b32362aaf. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.431320] env[68244]: DEBUG nova.network.neutron [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updating instance_info_cache with network_info: [{"id": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "address": "fa:16:3e:13:c3:81", "network": {"id": "8566040b-a0d0-45fa-a575-25bf080336db", "bridge": "br-int", "label": "tempest-ServersTestJSON-908114910-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "086fc6c89607495aa724a0c0776c78bc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72f005b-3f", "ovs_interfaceid": "f72f005b-3ff1-4910-9fdc-4d4b32362aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.439957] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1095.439957] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5248f87c-749b-405d-8e9c-646708c9b4f1" [ 1095.439957] env[68244]: _type = "Task" [ 1095.439957] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.452793] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5248f87c-749b-405d-8e9c-646708c9b4f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.651053] env[68244]: DEBUG nova.network.neutron [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Updated VIF entry in instance network info cache for port f7fd89ae-e48f-4a24-baad-9b7ce30dfea2. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.651730] env[68244]: DEBUG nova.network.neutron [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Updating instance_info_cache with network_info: [{"id": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "address": "fa:16:3e:6d:a4:82", "network": {"id": "9505076a-4913-45d5-a7bd-0996dd85b561", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1525363977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b89e410836c14f57aabd078a83e66276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd89ae-e4", "ovs_interfaceid": "f7fd89ae-e48f-4a24-baad-9b7ce30dfea2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.657134] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.674429] env[68244]: DEBUG nova.compute.utils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.676513] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1095.676825] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1095.725678] env[68244]: DEBUG nova.policy [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3151a146805a456da750a47964f86f2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a151f53070d94d08bf7e85617a6f5190', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.754293] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.754293] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.754293] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1095.891539] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780989, 'name': CreateVM_Task, 'duration_secs': 0.748757} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.891756] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.892463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.892633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.893069] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1095.893664] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87177e9c-e772-4b15-a188-852c4ced8395 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.903953] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1095.903953] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ac262-e2ff-1d6f-600e-92c7738ececb" [ 1095.903953] env[68244]: _type = "Task" [ 1095.903953] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.915061] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ac262-e2ff-1d6f-600e-92c7738ececb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.935264] env[68244]: DEBUG oslo_concurrency.lockutils [req-e4d42362-5f7a-439d-99dd-f17a68326034 req-b3ea6580-e6ed-496b-93fb-d27f53747a6e service nova] Releasing lock "refresh_cache-fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.951562] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5248f87c-749b-405d-8e9c-646708c9b4f1, 'name': SearchDatastore_Task, 'duration_secs': 0.089414} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.954531] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.954925] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 788e77e1-a356-4342-9ff3-5ad13868fd77/788e77e1-a356-4342-9ff3-5ad13868fd77.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.955353] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3ba8984-c2a8-418d-81c1-33f6d697d5ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.965136] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1095.965136] env[68244]: value = "task-2780990" [ 1095.965136] env[68244]: _type = "Task" [ 1095.965136] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.979147] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780990, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.022480] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f6175b-6dde-4cc6-badf-6266b7fef1a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.031655] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e3a5ac-5c17-406e-bd06-f3d0094b41c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.065372] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Successfully created port: 3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.067900] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e73a3da-e950-471f-9e10-6103d884397b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.077545] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f660c7e5-110e-465c-a428-ef7078f5fbed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.092695] env[68244]: DEBUG nova.compute.provider_tree [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.155891] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.158593] env[68244]: DEBUG oslo_concurrency.lockutils [req-df7df683-47ab-4503-8ae2-9d297369a953 req-b0c70af2-2787-40ea-b995-760871a16909 service nova] Releasing lock "refresh_cache-f9f6c504-f140-4c90-994b-d3ec2d148796" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.187756] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1096.305574] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.419047] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525ac262-e2ff-1d6f-600e-92c7738ececb, 'name': SearchDatastore_Task, 'duration_secs': 0.092673} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.419354] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.419917] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.420189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.420339] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.420522] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.420913] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bad8b95f-0bf0-4441-87ca-e9cce4977785 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.439335] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.439528] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 
tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.440409] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2fb909f-a921-4c87-b955-373b6473c4bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.447748] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1096.447748] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f803a5-1f68-8865-3c14-56fe76408773" [ 1096.447748] env[68244]: _type = "Task" [ 1096.447748] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.457221] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f803a5-1f68-8865-3c14-56fe76408773, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.478946] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.506534] env[68244]: DEBUG nova.network.neutron [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Updating instance_info_cache with network_info: [{"id": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "address": "fa:16:3e:82:25:a5", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d9dfff4-72", "ovs_interfaceid": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.598932] env[68244]: DEBUG nova.scheduler.client.report [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 
tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.658312] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.960706] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f803a5-1f68-8865-3c14-56fe76408773, 'name': SearchDatastore_Task, 'duration_secs': 0.085497} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.961582] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71811c2b-a912-4bd4-a237-b1dc475a6291 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.967228] env[68244]: DEBUG nova.compute.manager [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Received event network-vif-plugged-6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1096.967228] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Acquiring lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.967228] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.967228] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.967228] env[68244]: DEBUG nova.compute.manager [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] 
No waiting events found dispatching network-vif-plugged-6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.967763] env[68244]: WARNING nova.compute.manager [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Received unexpected event network-vif-plugged-6d9dfff4-722c-4e27-9f86-56f3dc8eee19 for instance with vm_state building and task_state spawning. [ 1096.967763] env[68244]: DEBUG nova.compute.manager [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Received event network-changed-6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1096.967763] env[68244]: DEBUG nova.compute.manager [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Refreshing instance network info cache due to event network-changed-6d9dfff4-722c-4e27-9f86-56f3dc8eee19. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1096.967879] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Acquiring lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.973537] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1096.973537] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529560e4-f606-0509-3f52-f3052ad8a55f" [ 1096.973537] env[68244]: _type = "Task" [ 1096.973537] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.985233] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529560e4-f606-0509-3f52-f3052ad8a55f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.988752] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780990, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.009231] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.009839] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Instance network_info: |[{"id": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "address": "fa:16:3e:82:25:a5", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d9dfff4-72", "ovs_interfaceid": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1097.010041] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Acquired lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.010097] env[68244]: DEBUG nova.network.neutron [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Refreshing network info cache for port 6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.011355] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:25:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d9dfff4-722c-4e27-9f86-56f3dc8eee19', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1097.018975] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.021894] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1097.022340] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d80f6179-fba4-4c5e-a022-be7612f229ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.042580] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1097.042580] env[68244]: value = "task-2780991" [ 1097.042580] env[68244]: _type = "Task" [ 1097.042580] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.051659] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780991, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.104294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.106416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.598s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.107922] env[68244]: INFO nova.compute.claims [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.129956] env[68244]: INFO nova.scheduler.client.report [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Deleted allocations for instance 477da9d1-8550-48be-b243-519b4f0ca443 [ 1097.159090] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780986, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.959361} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.159984] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799/2c6b03e0-6a7f-4133-a46a-ec51ea0c7799.vmdk to [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.161031] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea38a487-ab48-4fb5-b28c-a2eba6f644cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.185707] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.189203] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b29a263-c016-49a7-81d5-1828d2c876d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.206845] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1097.217925] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1097.217925] env[68244]: value = "task-2780992" [ 1097.217925] env[68244]: _type = "Task" [ 1097.217925] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.227737] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780992, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.239158] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1097.239491] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.239711] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1097.240012] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.240195] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1097.240350] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1097.240576] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1097.240743] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1097.240913] env[68244]: DEBUG 
nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1097.241092] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1097.241269] env[68244]: DEBUG nova.virt.hardware [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1097.242188] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0058b84f-4e81-4fb7-b460-8b98b14fb1ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.253094] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefd674d-18e5-4bb5-b3f0-f125995a813c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.287513] env[68244]: DEBUG nova.network.neutron [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Updated VIF entry in instance network info cache for port 6d9dfff4-722c-4e27-9f86-56f3dc8eee19. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.287895] env[68244]: DEBUG nova.network.neutron [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Updating instance_info_cache with network_info: [{"id": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "address": "fa:16:3e:82:25:a5", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d9dfff4-72", "ovs_interfaceid": "6d9dfff4-722c-4e27-9f86-56f3dc8eee19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.482925] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780990, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.491885] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529560e4-f606-0509-3f52-f3052ad8a55f, 'name': SearchDatastore_Task, 'duration_secs': 0.07395} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.492138] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.492406] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f9f6c504-f140-4c90-994b-d3ec2d148796/f9f6c504-f140-4c90-994b-d3ec2d148796.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1097.492673] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-170838a9-3ec9-417d-a928-db7575c55ae3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.499211] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1097.499211] env[68244]: value = "task-2780993" [ 1097.499211] env[68244]: _type = "Task" [ 1097.499211] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.507578] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.555320] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2780991, 'name': CreateVM_Task, 'duration_secs': 0.471886} completed successfully. 
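The "Waiting for the task" blocks and the "_poll_task ... progress is N%" lines above come from oslo.vmware's task-polling loop around long-running vCenter operations such as CopyVirtualDisk_Task. A minimal sketch of driving such a task with oslo.vmware, assuming a reachable vCenter; the endpoint, credentials and datastore paths are placeholders, and the CopyVirtualDisk_Task argument names follow the vSphere SDK rather than this deployment.

from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; constructing the session logs in to vCenter.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
disk_mgr = vim.service_content.virtualDiskManager

# Start the long-running copy, then block on it. wait_for_task() is the loop
# that emits the "progress is N%" and "completed successfully" DEBUG lines.
task = session.invoke_api(
    vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/base.vmdk',
    destName='[datastore2] instance-dir/instance.vmdk')
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task has completed
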
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.555593] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.556403] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.556627] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.557032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.557337] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f2aafcb-e92f-4e5d-ad2a-100a51617f59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.564240] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1097.564240] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522f3877-24f4-f17f-22dd-4307fabc707c" [ 1097.564240] env[68244]: _type = "Task" [ 1097.564240] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.573092] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522f3877-24f4-f17f-22dd-4307fabc707c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.640256] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3cc6c3a-fe88-43d9-89e3-20592ba0b7d1 tempest-ServerShowV254Test-1568122463 tempest-ServerShowV254Test-1568122463-project-member] Lock "477da9d1-8550-48be-b243-519b4f0ca443" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.585s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.728332] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.729170] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Successfully updated port: 3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1097.790787] env[68244]: DEBUG oslo_concurrency.lockutils [req-ee28a1f6-c06e-49a9-bf4b-0d89351f6fee req-ce7536c5-0935-4897-b35e-14c603af8d82 service nova] Releasing lock "refresh_cache-5c4bb8d0-8135-4272-83c2-ef923ac52d4a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.978991] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780990, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.784991} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.980363] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 788e77e1-a356-4342-9ff3-5ad13868fd77/788e77e1-a356-4342-9ff3-5ad13868fd77.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.980363] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.980363] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-791f8df4-942f-4813-ba60-325397e054fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.986527] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1097.986527] env[68244]: value = "task-2780994" [ 1097.986527] env[68244]: _type = "Task" [ 1097.986527] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.994569] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780994, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.009091] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780993, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.074407] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522f3877-24f4-f17f-22dd-4307fabc707c, 'name': SearchDatastore_Task, 'duration_secs': 0.070608} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.074688] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.074937] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.075194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.075432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.075551] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.075809] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b4090ef-b3ce-4b71-904a-405dac1060e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.086352] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.086566] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. 
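The Acquiring/Acquired/Releasing lock lines for the "[datastore2] devstack-image-cache_base" paths above are oslo.concurrency serializing access to the shared image cache, so concurrent spawns do not fetch or copy the same cached base VMDK at the same time. A minimal sketch of that pattern with lockutils; the lock name and the guarded function are hypothetical stand-ins.

from oslo_concurrency import lockutils

# Hypothetical lock name mirroring the cache-path locks in the log above.
CACHE_LOCK = '[datastore2] devstack-image-cache_base/<image-uuid>.vmdk'

def copy_from_image_cache():
    # Only one worker at a time may populate or copy this cached VMDK; the
    # context manager emits Acquiring/Acquired/Releasing DEBUG lines like
    # the ones above.
    with lockutils.lock(CACHE_LOCK):
        pass  # fetch-if-missing, then CopyVirtualDisk_Task into the instance dir
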
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.087297] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb7bfb47-8c73-46ae-b6e1-ae7caf4b0ac4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.092277] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1098.092277] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521131e1-b779-f4d8-dbc6-d57116c95df2" [ 1098.092277] env[68244]: _type = "Task" [ 1098.092277] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.099682] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521131e1-b779-f4d8-dbc6-d57116c95df2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.234376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.234376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.234376] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.234376] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780992, 'name': ReconfigVM_Task, 'duration_secs': 0.570103} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.234819] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Reconfigured VM instance instance-0000003e to attach disk [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32/ed5b8ba3-c8f0-468f-85d1-f36179bfef32.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.235773] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-312244c9-6d43-4c3a-bb90-7be10b296e41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.247151] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1098.247151] env[68244]: value = "task-2780995" [ 1098.247151] env[68244]: _type = "Task" [ 1098.247151] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.259283] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780995, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.398271] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529dea66-54d7-46c7-9a22-1278a866b16a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.407614] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826d665d-45e6-4f9c-9d27-caf02eae7f66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.443496] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e160344d-7418-407f-b51d-eeb3d1d2108f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.454801] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e3af8d-ef42-448e-ba37-a9ecfede2a67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.472087] env[68244]: DEBUG nova.compute.provider_tree [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.498872] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116945} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.499684] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1098.500401] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3350ab-1f27-4910-9a1f-b01c79da2aad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.528136] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 788e77e1-a356-4342-9ff3-5ad13868fd77/788e77e1-a356-4342-9ff3-5ad13868fd77.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.531842] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ef7a3f8-121f-4b65-9938-c2593f773b95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.549257] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780993, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.562046] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1098.562046] env[68244]: value = "task-2780996" [ 1098.562046] env[68244]: _type = "Task" [ 1098.562046] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.573439] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780996, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.602674] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521131e1-b779-f4d8-dbc6-d57116c95df2, 'name': SearchDatastore_Task, 'duration_secs': 0.036236} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.603617] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6dfa8b5-1173-4a1e-b552-4da5069ae023 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.609450] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1098.609450] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef84c2-842e-cd5e-ddc5-d695f54a8a59" [ 1098.609450] env[68244]: _type = "Task" [ 1098.609450] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.619087] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef84c2-842e-cd5e-ddc5-d695f54a8a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.758497] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780995, 'name': Rename_Task, 'duration_secs': 0.347749} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.758724] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.758998] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d1743c0-e0ac-4850-85f4-1e9cb1590b07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.765537] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1098.765537] env[68244]: value = "task-2780997" [ 1098.765537] env[68244]: _type = "Task" [ 1098.765537] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.770157] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.775331] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780997, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.916041] env[68244]: DEBUG nova.network.neutron [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [{"id": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "address": "fa:16:3e:fe:07:19", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cbfb410-db", "ovs_interfaceid": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.975813] env[68244]: DEBUG nova.scheduler.client.report [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.001087] env[68244]: DEBUG nova.compute.manager [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received event network-vif-plugged-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1099.001087] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Acquiring lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.001087] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.001087] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.001087] env[68244]: DEBUG nova.compute.manager [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] No waiting events found dispatching network-vif-plugged-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1099.001218] env[68244]: WARNING nova.compute.manager [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received unexpected event network-vif-plugged-3cbfb410-db85-46ec-ad9d-96a42b67105e for instance with vm_state building and task_state spawning. [ 1099.001299] env[68244]: DEBUG nova.compute.manager [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1099.001427] env[68244]: DEBUG nova.compute.manager [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing instance network info cache due to event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1099.001918] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Acquiring lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.013487] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780993, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.11751} completed successfully. 
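The "Inventory has not changed for provider" record a few lines above lists, per resource class, a total, a reserved amount, an allocation ratio and a max_unit. Placement derives usable capacity as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a quick check against the numbers in that record, in plain Python with the values copied from the log.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable={usable:g}, largest single allocation={inv['max_unit']}")

# VCPU: usable=192, largest single allocation=16
# MEMORY_MB: usable=196078, largest single allocation=65530
# DISK_GB: usable=400, largest single allocation=175
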
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.013734] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f9f6c504-f140-4c90-994b-d3ec2d148796/f9f6c504-f140-4c90-994b-d3ec2d148796.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.013941] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.014196] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0bd7940-cd12-4228-98c4-d4581fd8da7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.020544] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1099.020544] env[68244]: value = "task-2780998" [ 1099.020544] env[68244]: _type = "Task" [ 1099.020544] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.028369] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780998, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.071299] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780996, 'name': ReconfigVM_Task, 'duration_secs': 0.315297} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.071551] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 788e77e1-a356-4342-9ff3-5ad13868fd77/788e77e1-a356-4342-9ff3-5ad13868fd77.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.072208] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b65aa48-e1b1-4872-adaf-839fe9644715 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.079256] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1099.079256] env[68244]: value = "task-2780999" [ 1099.079256] env[68244]: _type = "Task" [ 1099.079256] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.087469] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780999, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.119737] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef84c2-842e-cd5e-ddc5-d695f54a8a59, 'name': SearchDatastore_Task, 'duration_secs': 0.015231} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.120055] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.120342] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 5c4bb8d0-8135-4272-83c2-ef923ac52d4a/5c4bb8d0-8135-4272-83c2-ef923ac52d4a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1099.120605] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c66687d6-4ba8-426d-8009-6a18e8bbd92e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.127062] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1099.127062] env[68244]: value = "task-2781000" [ 1099.127062] env[68244]: _type = "Task" [ 1099.127062] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.134805] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.276379] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780997, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.419500] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.419966] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Instance network_info: |[{"id": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "address": "fa:16:3e:fe:07:19", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cbfb410-db", "ovs_interfaceid": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1099.420371] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Acquired lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.420606] env[68244]: DEBUG nova.network.neutron [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1099.422161] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:07:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cbfb410-db85-46ec-ad9d-96a42b67105e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.431016] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 
tempest-ServerActionsTestOtherA-258842533-project-member] Creating folder: Project (a151f53070d94d08bf7e85617a6f5190). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1099.432359] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9cecdb9-a4fe-4ddb-9126-a3889ed9b7da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.445691] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created folder: Project (a151f53070d94d08bf7e85617a6f5190) in parent group-v558876. [ 1099.445894] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating folder: Instances. Parent ref: group-v559131. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1099.446155] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89944984-f2ac-40f2-81a6-3a9508ec5f82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.458067] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created folder: Instances in parent group-v559131. [ 1099.458341] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.458543] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1099.458758] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d296a77a-b8a9-4887-96aa-17600581bdcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.479478] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.479478] env[68244]: value = "task-2781003" [ 1099.479478] env[68244]: _type = "Task" [ 1099.479478] env[68244]: } to complete. 
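The "Creating folder: Project (a151f53070d94d08bf7e85617a6f5190)" and "Creating folder: Instances" records above build the per-tenant folder hierarchy in vCenter before CreateVM_Task runs. A hedged sketch of one such call through oslo.vmware's generic invoke_api; the session, parent folder reference and names are placeholders, and ensure_folder is a hypothetical helper rather than the driver's own code.

from oslo_vmware import exceptions as vexc

def ensure_folder(session, parent_folder_ref, name):
    """Create a child folder under parent_folder_ref, tolerating duplicates."""
    try:
        # CreateFolder is a plain vSphere API method on a Folder managed object.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)
    except vexc.DuplicateName:
        # Another worker created it first; the caller can look it up instead.
        return None

# Usage, mirroring the two log records above (refs are placeholders):
#   project = ensure_folder(session, vm_folder_ref, 'Project (tenant-id)')
#   instances = ensure_folder(session, project, 'Instances')
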
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.480393] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.480967] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.486718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.172s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.486929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.489354] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.360s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.489582] env[68244]: DEBUG nova.objects.instance [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lazy-loading 'resources' on Instance uuid f2e57bf9-05ee-49d8-846d-c3bf5920ae96 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.499045] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781003, 'name': CreateVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.513423] env[68244]: INFO nova.scheduler.client.report [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocations for instance 9658b4e0-f4f9-4628-b700-19d94800961c [ 1099.536146] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2780998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077265} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.536498] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.537700] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b8f85f-4b29-4b68-9d83-4d7ffd835903 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.566961] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] f9f6c504-f140-4c90-994b-d3ec2d148796/f9f6c504-f140-4c90-994b-d3ec2d148796.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.567840] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1879e997-def8-454f-b57c-0a5cdd1f111c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.592311] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2780999, 'name': Rename_Task, 'duration_secs': 0.178846} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.593500] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1099.593808] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1099.593808] env[68244]: value = "task-2781004" [ 1099.593808] env[68244]: _type = "Task" [ 1099.593808] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.593990] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52211bb2-a931-4b48-af93-7ea84f4a0e0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.603192] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781004, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.604345] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1099.604345] env[68244]: value = "task-2781005" [ 1099.604345] env[68244]: _type = "Task" [ 1099.604345] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.636366] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489073} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.636628] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 5c4bb8d0-8135-4272-83c2-ef923ac52d4a/5c4bb8d0-8135-4272-83c2-ef923ac52d4a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.636856] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.637126] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-829a0fb5-6a23-455a-9b59-2045236c0fdb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.642823] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1099.642823] env[68244]: value = "task-2781006" [ 1099.642823] env[68244]: _type = "Task" [ 1099.642823] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.649965] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781006, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.775292] env[68244]: DEBUG oslo_vmware.api [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2780997, 'name': PowerOnVM_Task, 'duration_secs': 0.526765} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.775568] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.881339] env[68244]: DEBUG nova.compute.manager [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1099.882381] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098723c2-fd3e-4fae-a4cc-008d2c5e1a1c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.989273] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781003, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.992632] env[68244]: DEBUG nova.compute.utils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.996619] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Allocating IP information in the background. 
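Right after PowerOnVM_Task finishes, the compute manager re-reads the VM power state ("Checking state" followed by a PropertyCollector.RetrievePropertiesEx call above). A hedged sketch of that property read via oslo.vmware's vim_util; get_power_state is a hypothetical wrapper, and session and vm_ref are assumed to exist already.

from oslo_vmware import vim_util

def get_power_state(session, vm_ref):
    # One PropertyCollector.RetrievePropertiesEx round trip, as in the log;
    # returns 'poweredOn', 'poweredOff' or 'suspended'.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')
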
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1099.996709] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1100.020986] env[68244]: DEBUG oslo_concurrency.lockutils [None req-01660e62-c942-45db-b0ad-fc9dd8595de9 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "9658b4e0-f4f9-4628-b700-19d94800961c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.127s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.069900] env[68244]: DEBUG nova.policy [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e2b78ca269843a0a5541e44727d807b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf55a7bfa5948d1837855650c1c960b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1100.113209] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.130537] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781005, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.154983] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781006, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229372} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.155267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.156327] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b3b32d-a4ad-4af9-8ac0-fdae70f6c427 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.178556] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 5c4bb8d0-8135-4272-83c2-ef923ac52d4a/5c4bb8d0-8135-4272-83c2-ef923ac52d4a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.183781] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6235522-f30c-4fb3-8304-4f34d911742c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.211429] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1100.211429] env[68244]: value = "task-2781007" [ 1100.211429] env[68244]: _type = "Task" [ 1100.211429] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.224442] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781007, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.264057] env[68244]: DEBUG nova.network.neutron [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updated VIF entry in instance network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.264413] env[68244]: DEBUG nova.network.neutron [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [{"id": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "address": "fa:16:3e:fe:07:19", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cbfb410-db", "ovs_interfaceid": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.363435] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c0057d-455a-4ab6-8c5b-98e41baa754b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.372062] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c01ea0-3368-4049-b109-3b066cd46115 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.418957] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd05eb1-bba1-4260-b6e5-2bcfdcc7004d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.422369] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9b2f6939-0bd7-4dc4-b7b1-f548db4b7c3c tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 42.353s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.427688] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8637f1-cfd0-4a17-989f-55288d3b9b3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.446636] env[68244]: DEBUG nova.compute.provider_tree [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1100.461813] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Successfully created port: 32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.489430] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781003, 'name': CreateVM_Task, 'duration_secs': 0.961535} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.489613] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.490381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.490539] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.491092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1100.491159] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f881bf6e-282e-47b6-9979-f783c6877b3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.495939] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1100.495939] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227191f-fc7e-eb1a-e3ee-d749f1ded277" [ 1100.495939] env[68244]: _type = "Task" [ 1100.495939] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.500210] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.508304] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227191f-fc7e-eb1a-e3ee-d749f1ded277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.607646] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781004, 'name': ReconfigVM_Task, 'duration_secs': 0.961882} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.607969] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Reconfigured VM instance instance-00000059 to attach disk [datastore2] f9f6c504-f140-4c90-994b-d3ec2d148796/f9f6c504-f140-4c90-994b-d3ec2d148796.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.608673] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-503e7ee2-0bba-4a5f-abe0-fc8b8b480f4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.618605] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1100.618605] env[68244]: value = "task-2781008" [ 1100.618605] env[68244]: _type = "Task" [ 1100.618605] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.626535] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781005, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.632413] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781008, 'name': Rename_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.722553] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781007, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.767653] env[68244]: DEBUG oslo_concurrency.lockutils [req-d12c18ef-5b59-43ac-927a-a0462ce717c1 req-0f6a851e-bb06-4923-917e-58bdef06dd76 service nova] Releasing lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.971740] env[68244]: ERROR nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [req-e53618d6-6746-491a-b30a-e1b042be2a8a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e53618d6-6746-491a-b30a-e1b042be2a8a"}]} [ 1100.990499] env[68244]: DEBUG nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1101.009108] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227191f-fc7e-eb1a-e3ee-d749f1ded277, 'name': SearchDatastore_Task, 'duration_secs': 0.024091} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.010255] env[68244]: DEBUG nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1101.010473] env[68244]: DEBUG nova.compute.provider_tree [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1101.012450] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.012675] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.012902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.013067] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.013250] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.013691] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-142680b0-7a3f-46b0-9c5f-9d848372626b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.023129] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.023327] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.024230] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-368bccfa-b33c-4745-b9cb-ef78640bff1c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.026985] env[68244]: DEBUG nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1101.031837] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1101.031837] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529b25f9-4469-c310-541c-7d0db96c6f60" [ 1101.031837] env[68244]: _type = "Task" [ 1101.031837] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.043017] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529b25f9-4469-c310-541c-7d0db96c6f60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.047480] env[68244]: DEBUG nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1101.126131] env[68244]: DEBUG oslo_vmware.api [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781005, 'name': PowerOnVM_Task, 'duration_secs': 1.068803} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.126670] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.126904] env[68244]: INFO nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Took 11.34 seconds to spawn the instance on the hypervisor. [ 1101.127127] env[68244]: DEBUG nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.130073] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e199c4c1-b01f-4df2-acf3-327f2538eec6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.135096] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781008, 'name': Rename_Task, 'duration_secs': 0.276634} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.135623] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1101.135865] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acb318fa-68df-4f38-a2ec-18a8719368dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.146047] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1101.146047] env[68244]: value = "task-2781009" [ 1101.146047] env[68244]: _type = "Task" [ 1101.146047] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.153306] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781009, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.224758] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781007, 'name': ReconfigVM_Task, 'duration_secs': 0.544887} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.225012] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 5c4bb8d0-8135-4272-83c2-ef923ac52d4a/5c4bb8d0-8135-4272-83c2-ef923ac52d4a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.225793] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02c453cd-c45d-4179-a507-07059278d617 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.233016] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1101.233016] env[68244]: value = "task-2781010" [ 1101.233016] env[68244]: _type = "Task" [ 1101.233016] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.241441] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781010, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.304291] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47daee8-a6d2-49bf-9f8b-98c1a8b9685b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.311157] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1085a1e1-559b-4478-aa8a-192b6b8ed9ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.342783] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495d461f-98b4-484e-8688-bbc509b1ffac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.350923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2192b2e8-6a6e-4c0d-b05c-9b91528cbfc5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.364875] env[68244]: DEBUG nova.compute.provider_tree [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1101.512071] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.539098] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.539370] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.539530] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.539713] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.539886] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.540012] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.540245] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.540402] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.540571] 
env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.540733] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.540904] env[68244]: DEBUG nova.virt.hardware [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.541805] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8aeeb1-e6e6-4ed2-90be-2f9081404cd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.548200] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529b25f9-4469-c310-541c-7d0db96c6f60, 'name': SearchDatastore_Task, 'duration_secs': 0.01487} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.549370] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29d12667-ef1d-4729-9d55-e6b2f4317ceb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.554923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898bbd35-18ac-46b6-931f-a1cb0cf4ff0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.561876] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1101.561876] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef0e13-15e2-f6e4-1718-ec4575d3ee2e" [ 1101.561876] env[68244]: _type = "Task" [ 1101.561876] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.577906] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ef0e13-15e2-f6e4-1718-ec4575d3ee2e, 'name': SearchDatastore_Task, 'duration_secs': 0.010528} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.578181] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.578461] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 75bec02f-82f7-4e8d-81da-3c511588be29/75bec02f-82f7-4e8d-81da-3c511588be29.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1101.578713] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6af6f2b7-98a5-4d66-8c93-423f0e300ec1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.591043] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1101.591043] env[68244]: value = "task-2781011" [ 1101.591043] env[68244]: _type = "Task" [ 1101.591043] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.600814] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.654979] env[68244]: INFO nova.compute.manager [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Took 18.71 seconds to build instance. [ 1101.662831] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781009, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.694994] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af760399-0932-41fc-a591-6d3bef35476c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.701296] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Suspending the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1101.701559] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8a13bf80-f424-4859-aa75-981d471281c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.708158] env[68244]: DEBUG oslo_vmware.api [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1101.708158] env[68244]: value = "task-2781012" [ 1101.708158] env[68244]: _type = "Task" [ 1101.708158] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.716621] env[68244]: DEBUG oslo_vmware.api [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781012, 'name': SuspendVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.745030] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781010, 'name': Rename_Task, 'duration_secs': 0.370939} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.745356] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1101.745607] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93b06145-2b25-447c-84f3-fc27351d0b31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.752962] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1101.752962] env[68244]: value = "task-2781013" [ 1101.752962] env[68244]: _type = "Task" [ 1101.752962] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.765237] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781013, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.904224] env[68244]: DEBUG nova.compute.manager [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Received event network-vif-plugged-32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1101.904224] env[68244]: DEBUG oslo_concurrency.lockutils [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] Acquiring lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.904224] env[68244]: DEBUG oslo_concurrency.lockutils [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] Lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.904224] env[68244]: DEBUG oslo_concurrency.lockutils [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] Lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.904224] env[68244]: DEBUG nova.compute.manager [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] No waiting events found dispatching network-vif-plugged-32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1101.904953] env[68244]: WARNING nova.compute.manager [req-8c4d183c-0455-45a4-a0ba-4847a6fceaa0 req-a7b3552e-b488-47dd-b557-ef14c5bccc0e service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Received unexpected event network-vif-plugged-32c0f429-b5b6-4527-9e8b-a057737135b8 for instance with vm_state building and task_state spawning. 
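The records around here repeatedly show vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, SuspendVM_Task) being invoked and then polled until they log "progress is N%" and finally "completed successfully" with a duration_secs value. Below is a minimal sketch of that poll loop for orientation only; it is not Nova's or oslo.vmware's implementation (the real path is the wait_for_task/_poll_task code in oslo_vmware/api.py visible in the {{...}} trailers above), and get_task_info() is a hypothetical helper standing in for a vSphere PropertyCollector read of the task's "info" property.

import time

# Hypothetical helper: stands in for reading the vSphere Task managed
# object's TaskInfo (state, progress, result, error) over the SOAP API.
def get_task_info(session, task_ref):
    raise NotImplementedError("placeholder for a Task.info lookup")

def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it succeeds or fails.

    Mirrors the 'progress is N%' / 'completed successfully' pattern in the
    log above; a sketch under the assumptions stated in the lead-in.
    """
    while True:
        info = get_task_info(session, task_ref)      # e.g. task-2781007
        if info.state == "success":
            return info.result                       # logged with duration_secs
        if info.state == "error":
            raise RuntimeError(info.error)           # task failed on the vCenter side
        # 'queued' or 'running': report progress and poll again
        print(f"Task {task_ref}: {info.progress or 0}% complete")
        time.sleep(poll_interval)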
[ 1101.906227] env[68244]: DEBUG nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1101.906668] env[68244]: DEBUG nova.compute.provider_tree [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 134 to 135 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1101.906993] env[68244]: DEBUG nova.compute.provider_tree [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1102.008018] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Successfully updated port: 32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1102.100808] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781011, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.159101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-12b06466-f36f-41a5-af71-fc1bac11edf5 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.230s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.159372] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781009, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.218821] env[68244]: DEBUG oslo_vmware.api [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781012, 'name': SuspendVM_Task} progress is 75%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.262813] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781013, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.415050] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.923s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.415656] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.563s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.417604] env[68244]: INFO nova.compute.claims [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1102.453329] env[68244]: INFO nova.scheduler.client.report [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleted allocations for instance f2e57bf9-05ee-49d8-846d-c3bf5920ae96 [ 1102.516644] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.516644] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.516644] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.600860] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb 
tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539048} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.602178] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 75bec02f-82f7-4e8d-81da-3c511588be29/75bec02f-82f7-4e8d-81da-3c511588be29.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1102.602178] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1102.602178] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34af0636-6247-4fbf-8612-7b7326c3e972 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.608520] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1102.608520] env[68244]: value = "task-2781014" [ 1102.608520] env[68244]: _type = "Task" [ 1102.608520] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.616802] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781014, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.661370] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781009, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.720279] env[68244]: DEBUG oslo_vmware.api [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781012, 'name': SuspendVM_Task} progress is 75%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.766804] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781013, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.964635] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c8b7aab-5f38-4823-8eca-760eddcae7b6 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "f2e57bf9-05ee-49d8-846d-c3bf5920ae96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.707s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.049710] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1103.117738] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781014, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120083} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.121036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.121249] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa3dcdb-627b-4e1a-abfc-2d54c71cabf3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.143226] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 75bec02f-82f7-4e8d-81da-3c511588be29/75bec02f-82f7-4e8d-81da-3c511588be29.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.143767] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb3b665d-a7cd-4992-a682-0547b53232f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.167934] env[68244]: DEBUG oslo_vmware.api [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781009, 'name': PowerOnVM_Task, 'duration_secs': 1.674445} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.169139] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.169735] env[68244]: INFO nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Took 10.92 seconds to spawn the instance on the hypervisor. [ 1103.169735] env[68244]: DEBUG nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.169891] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1103.169891] env[68244]: value = "task-2781015" [ 1103.169891] env[68244]: _type = "Task" [ 1103.169891] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.170554] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b58d51-a665-443a-b555-5c4ee516d877 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.180729] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781015, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.194285] env[68244]: DEBUG nova.network.neutron [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Updating instance_info_cache with network_info: [{"id": "32c0f429-b5b6-4527-9e8b-a057737135b8", "address": "fa:16:3e:da:a0:20", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0f429-b5", "ovs_interfaceid": "32c0f429-b5b6-4527-9e8b-a057737135b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.221251] env[68244]: DEBUG oslo_vmware.api [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781012, 'name': SuspendVM_Task, 'duration_secs': 1.395059} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.222309] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Suspended the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1103.222601] env[68244]: DEBUG nova.compute.manager [None req-5cb38dc8-52b1-42de-a70d-1c884c32ff0b tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.223591] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13484361-f3db-4ad7-a696-452ca8bb3dd6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.264503] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781013, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.564736] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.565155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.565313] env[68244]: INFO nova.compute.manager [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Rebooting instance [ 1103.683983] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781015, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.696574] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.697385] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Instance network_info: |[{"id": "32c0f429-b5b6-4527-9e8b-a057737135b8", "address": "fa:16:3e:da:a0:20", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0f429-b5", "ovs_interfaceid": "32c0f429-b5b6-4527-9e8b-a057737135b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1103.697385] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:a0:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32c0f429-b5b6-4527-9e8b-a057737135b8', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1103.705206] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.705745] env[68244]: INFO nova.compute.manager [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Took 20.02 seconds to build instance. [ 1103.706698] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1103.706929] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5594725d-a108-4fd6-89fc-96c238afff08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.722504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8f43d724-5571-41d3-b022-49d814d42599 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.042s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.727483] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1103.727483] env[68244]: value = "task-2781016" [ 1103.727483] env[68244]: _type = "Task" [ 1103.727483] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.737629] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781016, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.764676] env[68244]: DEBUG oslo_vmware.api [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781013, 'name': PowerOnVM_Task, 'duration_secs': 1.64466} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.767366] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.767565] env[68244]: INFO nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Took 9.01 seconds to spawn the instance on the hypervisor. [ 1103.767741] env[68244]: DEBUG nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.769018] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2d8969-7e22-4124-9197-4b531b99c081 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.780165] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdbfb64-7681-4f74-82eb-fb8fd42cae62 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.789152] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafa41f8-bce2-4ca4-961c-82e08a58ecc7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.825045] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140e8e63-7af5-498e-b2d7-536d67c1baaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.833242] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a306798-cacc-45f4-aa5e-a818838a9f98 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.849568] env[68244]: DEBUG nova.compute.provider_tree [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.862033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "b84c2c08-651a-407d-89dd-177bc5d90313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.862033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.862204] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.862518] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.862936] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.864934] env[68244]: INFO nova.compute.manager [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Terminating instance [ 1103.972254] env[68244]: DEBUG nova.compute.manager [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Received event network-changed-32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1103.972254] env[68244]: DEBUG nova.compute.manager [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Refreshing instance network info cache due to event network-changed-32c0f429-b5b6-4527-9e8b-a057737135b8. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1103.972363] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Acquiring lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.972477] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Acquired lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.972655] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Refreshing network info cache for port 32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.082916] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.084125] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.084125] env[68244]: DEBUG nova.network.neutron [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.183368] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781015, 'name': ReconfigVM_Task, 'duration_secs': 0.529047} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.183670] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 75bec02f-82f7-4e8d-81da-3c511588be29/75bec02f-82f7-4e8d-81da-3c511588be29.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.184337] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dca9041-fa67-4cbd-8311-22f3d58008d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.190446] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1104.190446] env[68244]: value = "task-2781017" [ 1104.190446] env[68244]: _type = "Task" [ 1104.190446] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.198354] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781017, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.237633] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781016, 'name': CreateVM_Task, 'duration_secs': 0.489362} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.237806] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1104.238600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.238771] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.239113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1104.239580] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2eca420-275c-4ac0-96b9-633ab37ef0e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.244138] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1104.244138] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df20a5-e7d8-6acd-dc36-e0edb1d1c436" [ 1104.244138] env[68244]: _type = "Task" [ 1104.244138] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.252620] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df20a5-e7d8-6acd-dc36-e0edb1d1c436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.292353] env[68244]: INFO nova.compute.manager [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Took 19.46 seconds to build instance. 
[ 1104.353419] env[68244]: DEBUG nova.scheduler.client.report [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1104.368742] env[68244]: DEBUG nova.compute.manager [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.368981] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.369938] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f048a685-dc17-4bfd-adfc-64a389b84903 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.375566] env[68244]: INFO nova.compute.manager [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Resuming [ 1104.376275] env[68244]: DEBUG nova.objects.instance [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'flavor' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.383386] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.383656] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b0e2fc2-de0a-4ba2-9b71-85e333ebd1d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.391591] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1104.391591] env[68244]: value = "task-2781018" [ 1104.391591] env[68244]: _type = "Task" [ 1104.391591] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.401663] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2781018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.704776] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781017, 'name': Rename_Task, 'duration_secs': 0.231228} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.704776] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.704776] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-004cb7df-4bd5-4a64-9b85-f9d7ed135049 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.711690] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1104.711690] env[68244]: value = "task-2781019" [ 1104.711690] env[68244]: _type = "Task" [ 1104.711690] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.719907] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781019, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.754403] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df20a5-e7d8-6acd-dc36-e0edb1d1c436, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.754741] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.754984] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1104.755243] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.755390] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.756590] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.756590] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d7159ca-8df3-49ae-86e1-9417158d9da7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.764314] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.764496] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1104.765624] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b829964-dc61-4d57-87b8-1a2fd8c2d0d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.767708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "f9f6c504-f140-4c90-994b-d3ec2d148796" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.768040] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.768203] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.768395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.768566] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.771201] env[68244]: INFO nova.compute.manager [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Terminating instance [ 1104.777116] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1104.777116] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287d5a5-58f0-b19e-95c3-18df3ef5b1d0" [ 1104.777116] env[68244]: _type = "Task" [ 1104.777116] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.784674] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287d5a5-58f0-b19e-95c3-18df3ef5b1d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.794972] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2248be64-988c-4449-a200-26da4bd5fb09 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.971s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.858554] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.859262] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1104.862438] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.251s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.862848] env[68244]: DEBUG nova.objects.instance [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'pci_requests' on Instance uuid d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.864250] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.864477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.864670] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.864852] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.865033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.867352] env[68244]: INFO nova.compute.manager [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Terminating instance [ 1104.903238] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2781018, 'name': PowerOffVM_Task, 'duration_secs': 0.427506} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.903524] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.903694] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.903947] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e586f54-230c-4283-9971-c6c1dffa0d13 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.977491] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.977718] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.977972] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleting the datastore file [datastore2] b84c2c08-651a-407d-89dd-177bc5d90313 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.978261] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1557b6a-1246-4d4d-8a25-46f964293630 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.985028] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for the task: (returnval){ [ 1104.985028] env[68244]: value = "task-2781021" [ 1104.985028] env[68244]: _type = "Task" [ 1104.985028] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.994274] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2781021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.996087] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Updated VIF entry in instance network info cache for port 32c0f429-b5b6-4527-9e8b-a057737135b8. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1104.996087] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Updating instance_info_cache with network_info: [{"id": "32c0f429-b5b6-4527-9e8b-a057737135b8", "address": "fa:16:3e:da:a0:20", "network": {"id": "01833773-b06b-46ac-aed8-bfb5221e23fd", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-946078979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf55a7bfa5948d1837855650c1c960b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c0f429-b5", "ovs_interfaceid": "32c0f429-b5b6-4527-9e8b-a057737135b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.093737] env[68244]: DEBUG nova.network.neutron [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.188814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.189194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.221861] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781019, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.277186] env[68244]: DEBUG nova.compute.manager [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.277419] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.278435] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db10dac7-5e0d-421e-a58f-af0647d3773a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.289018] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.292336] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6425b969-d273-4478-9a2c-1df27b535390 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.293728] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287d5a5-58f0-b19e-95c3-18df3ef5b1d0, 'name': SearchDatastore_Task, 'duration_secs': 0.009834} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.294756] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-056b7605-fd10-4cce-b9c6-41b6ac78a4f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.298424] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1105.298424] env[68244]: value = "task-2781022" [ 1105.298424] env[68244]: _type = "Task" [ 1105.298424] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.302074] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1105.302074] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5224cb3c-bb46-9923-4543-64f8e84a648a" [ 1105.302074] env[68244]: _type = "Task" [ 1105.302074] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.308064] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.313129] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5224cb3c-bb46-9923-4543-64f8e84a648a, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. 
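The "Waiting for the task: (returnval){...} to complete" blocks and the "progress is N%" lines are oslo.vmware's task poller at work: the driver submits an asynchronous vCenter task (PowerOffVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, and so on) and then polls its task info until it reports success or error. A simplified sketch of that pattern follows, assuming a caller-supplied get_task_info callable; it stands in for, and is not, the oslo.vmware implementation.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll an asynchronous vCenter-style task until it finishes.

    get_task_info must return an object with .state ('running', 'success'
    or 'error'), .progress and .error; it is a stand-in for reading the
    real TaskInfo property from the API.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        log("progress is %s%%" % info.progress)
        time.sleep(poll_interval)

In oslo.vmware itself the equivalent loop sits behind VMwareAPISession.wait_for_task, which is what emits the _poll_task lines from api.py seen throughout this log.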
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.313381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.313642] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 4eb691f4-567e-412c-ba04-792ee9a21135/4eb691f4-567e-412c-ba04-792ee9a21135.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1105.313894] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-113feb35-c101-4781-a9ba-199e629cf792 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.320768] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1105.320768] env[68244]: value = "task-2781023" [ 1105.320768] env[68244]: _type = "Task" [ 1105.320768] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.328400] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781023, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.364779] env[68244]: DEBUG nova.compute.utils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1105.368461] env[68244]: DEBUG nova.objects.instance [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'numa_topology' on Instance uuid d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.370186] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1105.370417] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1105.372738] env[68244]: DEBUG nova.compute.manager [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.372939] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.373939] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9aef6e-deb2-46ac-8cb0-58237217c9bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.385497] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.385792] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38a1253e-7a57-4dfd-b12d-d19339436c0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.394171] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1105.394171] env[68244]: value = "task-2781024" [ 1105.394171] env[68244]: _type = "Task" [ 1105.394171] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.402774] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781024, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.431483] env[68244]: DEBUG nova.policy [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a36a2057d8245ddb685bd9d1bcc19e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '713d71c9807247308f468c2ef7ede516', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1105.497706] env[68244]: DEBUG oslo_vmware.api [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Task: {'id': task-2781021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13745} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.497706] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.497706] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.497706] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.497706] env[68244]: INFO nova.compute.manager [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1105.497706] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
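The "Acquiring lock ... / acquired ... waited 0.000s / released ... held N.NNNs" lines throughout this section come from oslo.concurrency's lockutils wrapper, which the compute manager uses to serialize operations on a single instance (reserve_block_device_name, attach_volume, the refresh_cache-* locks, and so on) under a named lock. A minimal sketch of the same primitive, reusing an instance UUID from the log purely as an example lock name:

from oslo_concurrency import lockutils

@lockutils.synchronized("df935885-c313-473d-aa3a-ba81aa999554")
def do_reserve():
    # Critical section: only one greenthread holding this lock name runs
    # here at a time; entry and exit produce the acquired/released debug
    # lines with the waited/held timings.
    pass

def do_attach_volume():
    # The same helper is also available as a context manager.
    with lockutils.lock("df935885-c313-473d-aa3a-ba81aa999554"):
        pass

do_reserve()
do_attach_volume()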
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.497706] env[68244]: DEBUG nova.compute.manager [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.497706] env[68244]: DEBUG nova.network.neutron [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.499729] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Releasing lock "refresh_cache-4eb691f4-567e-412c-ba04-792ee9a21135" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.500127] env[68244]: DEBUG nova.compute.manager [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-changed-e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1105.500336] env[68244]: DEBUG nova.compute.manager [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing instance network info cache due to event network-changed-e2daf964-3dca-4df6-b310-952aab3796a9. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1105.500558] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.500693] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.500854] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing network info cache for port e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.599621] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.692399] env[68244]: DEBUG nova.compute.utils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1105.724022] env[68244]: DEBUG oslo_vmware.api [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': 
task-2781019, 'name': PowerOnVM_Task, 'duration_secs': 0.845276} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.724366] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.724545] env[68244]: INFO nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Took 8.52 seconds to spawn the instance on the hypervisor. [ 1105.724729] env[68244]: DEBUG nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.725603] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91cce16-6fa9-4642-9da5-e545f6dda348 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.812092] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781022, 'name': PowerOffVM_Task, 'duration_secs': 0.3122} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.812944] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.813384] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.813643] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7936f3e-46a2-4299-b404-9afd20e35d24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.837084] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483463} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.837348] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] 4eb691f4-567e-412c-ba04-792ee9a21135/4eb691f4-567e-412c-ba04-792ee9a21135.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.837569] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1105.837842] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a816141-ebab-4eac-83fe-03f027613b1b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.844023] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1105.844023] env[68244]: value = "task-2781026" [ 1105.844023] env[68244]: _type = "Task" [ 1105.844023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.851754] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.867767] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Start building block device mappings for instance. 
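The "Extending root virtual disk to 1048576" record above is the flavor's 1 GB root disk expressed in KiB, which appears to be the unit the ExtendVirtualDisk_Task call expects for its new capacity. A one-line check against the logged value:

root_gb = 1                                # the tiny m1.nano-style tempest flavor
new_capacity_kb = root_gb * 1024 * 1024    # GiB -> KiB
assert new_capacity_kb == 1048576          # matches the size in the log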
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1105.870846] env[68244]: INFO nova.compute.claims [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.877646] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.877904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.878114] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Deleting the datastore file [datastore2] f9f6c504-f140-4c90-994b-d3ec2d148796 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.878586] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c41e58f-40cf-4af7-bf9b-b94af82533f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.884483] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for the task: (returnval){ [ 1105.884483] env[68244]: value = "task-2781027" [ 1105.884483] env[68244]: _type = "Task" [ 1105.884483] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.889044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.892532] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquired lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.892532] env[68244]: DEBUG nova.network.neutron [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.896605] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.904547] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781024, 'name': PowerOffVM_Task, 'duration_secs': 0.264226} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.907162] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.907249] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.907631] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23ee69c1-7f10-4f8b-9e82-00df1b319940 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.918639] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Successfully created port: 6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.967861] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.968107] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.968306] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 5c4bb8d0-8135-4272-83c2-ef923ac52d4a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.968643] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eaa046b9-0e14-4b1d-b362-b07ef8d7712d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.974742] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1105.974742] env[68244]: value = "task-2781029" [ 1105.974742] env[68244]: _type = "Task" [ 1105.974742] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.982802] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781029, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.106149] env[68244]: DEBUG nova.compute.manager [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.107042] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839d7927-afd2-40d0-8505-2ccbbaef2c02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.196476] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.250760] env[68244]: INFO nova.compute.manager [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Took 20.30 seconds to build instance. [ 1106.275136] env[68244]: DEBUG nova.compute.manager [req-3419e828-dbd5-4348-90e4-dae92512fba3 req-bae6f575-eef1-4eb6-b0cf-d9cb5cffccec service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Received event network-vif-deleted-5da84ae5-3ae6-4d70-b9c2-8281d992dec1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1106.275348] env[68244]: INFO nova.compute.manager [req-3419e828-dbd5-4348-90e4-dae92512fba3 req-bae6f575-eef1-4eb6-b0cf-d9cb5cffccec service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Neutron deleted interface 5da84ae5-3ae6-4d70-b9c2-8281d992dec1; detaching it from the instance and deleting it from the info cache [ 1106.275520] env[68244]: DEBUG nova.network.neutron [req-3419e828-dbd5-4348-90e4-dae92512fba3 req-bae6f575-eef1-4eb6-b0cf-d9cb5cffccec service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.339935] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updated VIF entry in instance network info cache for port e2daf964-3dca-4df6-b310-952aab3796a9. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.340377] env[68244]: DEBUG nova.network.neutron [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.356826] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082352} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.358180] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1106.358597] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a6408c-1060-4dc7-8e32-311c0c61c55b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.382734] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 4eb691f4-567e-412c-ba04-792ee9a21135/4eb691f4-567e-412c-ba04-792ee9a21135.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1106.389492] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a0519de-9aa9-496c-b36b-fc5acc91e35d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.416831] env[68244]: DEBUG oslo_vmware.api [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Task: {'id': task-2781027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14848} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.421275] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.421275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.421275] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.421275] env[68244]: INFO nova.compute.manager [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Took 1.14 seconds to destroy the instance on the hypervisor. 
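Taken together, the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task records above trace the usual VMware spawn path for an image-backed root disk: the image is kept once under devstack-image-cache_base on the datastore, copied into the instance directory, grown to the flavor size, and then attached to the VM through a reconfigure. A high-level sketch of that sequence follows; the three helpers are hypothetical stand-ins for the vm_util/volumeops steps, not Nova's actual signatures.

def copy_virtual_disk(session, src, dst):
    """Stand-in for the CopyVirtualDisk_Task step."""
    print("copy", src, "->", dst)

def extend_virtual_disk(session, path, new_capacity_kb):
    """Stand-in for the ExtendVirtualDisk_Task step (size in KiB)."""
    print("extend", path, "to", new_capacity_kb, "KiB")

def attach_disk_to_vm(session, instance_uuid, path, disk_type):
    """Stand-in for the ReconfigVM_Task step that attaches the disk."""
    print("attach", path, "to", instance_uuid, "as", disk_type)

def spawn_root_disk_from_cache(session, image_id, instance_uuid, root_gb,
                               datastore="datastore1"):
    cached = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)
    target = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
    copy_virtual_disk(session, cached, target)
    extend_virtual_disk(session, target, root_gb * 1024 * 1024)
    attach_disk_to_vm(session, instance_uuid, target, disk_type="sparse")

spawn_root_disk_from_cache(None,
                           "9aa0b4d1-af1b-4141-9ca6-95525b722d7e",
                           "4eb691f4-567e-412c-ba04-792ee9a21135",
                           root_gb=1)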
[ 1106.421275] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.421275] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1106.421275] env[68244]: value = "task-2781030" [ 1106.421275] env[68244]: _type = "Task" [ 1106.421275] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.421275] env[68244]: DEBUG nova.compute.manager [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.421275] env[68244]: DEBUG nova.network.neutron [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.422464] env[68244]: DEBUG nova.network.neutron [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.434413] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.489555] env[68244]: DEBUG oslo_vmware.api [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146163} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.489555] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.489555] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.489555] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.489555] env[68244]: INFO nova.compute.manager [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1106.489555] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.489880] env[68244]: DEBUG nova.compute.manager [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.489880] env[68244]: DEBUG nova.network.neutron [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.714974] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d70bc2-2d1f-47a3-abf8-07548f789713 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.724323] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea73efba-0aef-471b-ab6a-56ea817c3c4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.766303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d9a026aa-54c9-4276-ba1f-cba07ad6feeb tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.822s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.767749] env[68244]: DEBUG nova.network.neutron [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: 
ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [{"id": "abbd3e34-9461-4503-86ee-598fe02a65d3", "address": "fa:16:3e:11:38:d0", "network": {"id": "d9aeda27-209b-46e4-80c8-b0688ec59890", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1599719895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd48f74a8554407593bb2c69b3191d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba4f6497-e2b4-43b5-9819-6927865ae974", "external-id": "nsx-vlan-transportzone-112", "segmentation_id": 112, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabbd3e34-94", "ovs_interfaceid": "abbd3e34-9461-4503-86ee-598fe02a65d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.769684] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b65eb03-2bfe-48af-a4fc-0af066c02ddc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.778613] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0bcdd9-e08e-41f4-a158-bb70feda89ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.785418] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18a2ef93-3000-4a9f-a8ab-ccde8b867c6e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.800418] env[68244]: DEBUG nova.compute.provider_tree [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1106.810328] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d7c9e3-2978-42d1-b777-f7a38522d539 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.849144] env[68244]: DEBUG oslo_concurrency.lockutils [req-de4c3780-2e95-4332-842a-a2bba36f1b6d req-3de0764f-21c6-4e68-9901-453979ed084a service nova] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.849144] env[68244]: DEBUG 
nova.compute.manager [req-3419e828-dbd5-4348-90e4-dae92512fba3 req-bae6f575-eef1-4eb6-b0cf-d9cb5cffccec service nova] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Detach interface failed, port_id=5da84ae5-3ae6-4d70-b9c2-8281d992dec1, reason: Instance b84c2c08-651a-407d-89dd-177bc5d90313 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1106.906054] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1106.928884] env[68244]: INFO nova.compute.manager [-] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Took 1.43 seconds to deallocate network for instance. [ 1106.936357] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.942148] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.942330] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.942475] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.942651] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.942793] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Image pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.942947] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.943310] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.943476] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1106.943639] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.943801] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.943968] env[68244]: DEBUG nova.virt.hardware [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.944903] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c10c38c-afa7-4247-951e-b70eb70793ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.954945] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c05fd38-72f7-4ded-bb94-4edeed59bd12 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.128029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeadc38a-a239-450b-a51d-d2c1c327d4b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.134805] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Doing hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1107.138790] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-693eb609-6601-49e1-a393-e220ff9fd1fb {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.145201] env[68244]: DEBUG oslo_vmware.api [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1107.145201] env[68244]: value = "task-2781031" [ 1107.145201] env[68244]: _type = "Task" [ 1107.145201] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.154201] env[68244]: DEBUG oslo_vmware.api [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781031, 'name': ResetVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.156663] env[68244]: DEBUG nova.compute.manager [req-daf989e4-a35b-4b7d-b904-a19131d0739a req-a3666bae-fbe0-413f-848f-f68df6fde284 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Received event network-vif-deleted-6d9dfff4-722c-4e27-9f86-56f3dc8eee19 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1107.156871] env[68244]: INFO nova.compute.manager [req-daf989e4-a35b-4b7d-b904-a19131d0739a req-a3666bae-fbe0-413f-848f-f68df6fde284 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Neutron deleted interface 6d9dfff4-722c-4e27-9f86-56f3dc8eee19; detaching it from the instance and deleting it from the info cache [ 1107.157059] env[68244]: DEBUG nova.network.neutron [req-daf989e4-a35b-4b7d-b904-a19131d0739a req-a3666bae-fbe0-413f-848f-f68df6fde284 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.274770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Releasing lock "refresh_cache-ed5b8ba3-c8f0-468f-85d1-f36179bfef32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.276365] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34b504c-cd37-4cc8-bbd8-4e5ce84def4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.287021] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Resuming the VM {{(pid=68244) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1107.287343] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e5c6976-30ed-4f1f-9985-182fa22ae130 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.294520] env[68244]: DEBUG oslo_vmware.api [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1107.294520] env[68244]: value = "task-2781032" [ 1107.294520] env[68244]: _type = "Task" [ 1107.294520] env[68244]: } 
to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.304129] env[68244]: DEBUG oslo_vmware.api [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.307988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.307988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.307988] env[68244]: INFO nova.compute.manager [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Attaching volume 4b2ae9b1-b0ee-4218-8c94-6e3f817e161a to /dev/sdb [ 1107.350157] env[68244]: DEBUG nova.scheduler.client.report [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1107.350488] env[68244]: DEBUG nova.compute.provider_tree [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 135 to 136 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1107.350677] env[68244]: DEBUG nova.compute.provider_tree [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1107.355875] env[68244]: DEBUG nova.network.neutron [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.363601] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a5b1c5-cc11-49dd-bc33-814025382303 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.371629] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa6bb16-1e6e-4d2f-91f6-7fc3820a337b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.389151] env[68244]: DEBUG nova.virt.block_device [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating existing volume attachment record: bb5ba656-fb84-4f47-b1f4-90414874feae {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1107.434134] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.449502] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.597725] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Successfully updated port: 6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.622341] env[68244]: DEBUG nova.network.neutron [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.657605] env[68244]: DEBUG oslo_vmware.api [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781031, 'name': ResetVM_Task, 'duration_secs': 0.1136} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.657970] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Did hard reboot of VM {{(pid=68244) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1107.658468] env[68244]: DEBUG nova.compute.manager [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.659627] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7b5175-236f-4ae7-8820-e2fb909b436c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.663896] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bf7e183-7c52-48a2-b510-ca0c4374975e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.676161] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814cfcd4-1fdf-4028-9245-8b731d55ee58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.712800] env[68244]: DEBUG nova.compute.manager [req-daf989e4-a35b-4b7d-b904-a19131d0739a req-a3666bae-fbe0-413f-848f-f68df6fde284 service nova] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Detach interface failed, port_id=6d9dfff4-722c-4e27-9f86-56f3dc8eee19, reason: Instance 5c4bb8d0-8135-4272-83c2-ef923ac52d4a could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1107.808019] env[68244]: DEBUG oslo_vmware.api [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781032, 'name': PowerOnVM_Task} progress is 93%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.860099] env[68244]: INFO nova.compute.manager [-] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Took 1.44 seconds to deallocate network for instance. 
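The inventory payload logged above for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 can be read with Placement's usual capacity rule, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A minimal sketch working through the logged numbers (plain Python for illustration, not Nova or Placement code):

# Inventory figures taken directly from the set_inventory_for_provider entry above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 175, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Schedulable capacity per resource class; max_unit limits one allocation.
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(f"{rc}: capacity={capacity}, single allocation capped at {inv['max_unit']}")

# Expected output for the values above:
#   VCPU: capacity=192, single allocation capped at 16
#   MEMORY_MB: capacity=196078, single allocation capped at 65530
#   DISK_GB: capacity=400, single allocation capped at 175
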
[ 1107.860099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.997s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.865024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.553s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.865024] env[68244]: INFO nova.compute.claims [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.902486] env[68244]: INFO nova.network.neutron [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating port 9bcf5292-c53f-42bf-97f1-7f616748f9ed with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1107.937882] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781030, 'name': ReconfigVM_Task, 'duration_secs': 1.10609} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.938304] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 4eb691f4-567e-412c-ba04-792ee9a21135/4eb691f4-567e-412c-ba04-792ee9a21135.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1107.939319] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73f6d646-e105-4aab-a339-7ca713e55475 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.948649] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1107.948649] env[68244]: value = "task-2781036" [ 1107.948649] env[68244]: _type = "Task" [ 1107.948649] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.959677] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781036, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.104281] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.104606] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.104844] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.128964] env[68244]: INFO nova.compute.manager [-] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Took 1.64 seconds to deallocate network for instance. [ 1108.178264] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e0b40a06-c567-4340-ac34-8431172e3758 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.613s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.306899] env[68244]: DEBUG oslo_vmware.api [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781032, 'name': PowerOnVM_Task, 'duration_secs': 0.72278} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.307255] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Resumed the VM {{(pid=68244) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1108.307493] env[68244]: DEBUG nova.compute.manager [None req-4d345d65-ef6e-4770-a2e5-fdd6a194aa8a tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.308380] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d69a91-5cb7-49f1-9e63-f3cddede93ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.353351] env[68244]: DEBUG nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Received event network-vif-deleted-f7fd89ae-e48f-4a24-baad-9b7ce30dfea2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.353483] env[68244]: DEBUG nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Received event network-vif-plugged-6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.353742] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Acquiring lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.353890] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.354074] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.354277] env[68244]: DEBUG nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] No waiting events found dispatching network-vif-plugged-6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1108.354460] env[68244]: WARNING nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 
91422c89-601c-4e5f-b5b0-fa2639031d3e] Received unexpected event network-vif-plugged-6b35363e-cd67-4df3-a6e4-3fa00355ae06 for instance with vm_state building and task_state spawning. [ 1108.354651] env[68244]: DEBUG nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Received event network-changed-6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.354820] env[68244]: DEBUG nova.compute.manager [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Refreshing instance network info cache due to event network-changed-6b35363e-cd67-4df3-a6e4-3fa00355ae06. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1108.355009] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Acquiring lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.377585] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.459692] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781036, 'name': Rename_Task, 'duration_secs': 0.157064} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.460044] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1108.460332] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29ba7306-c7a2-4794-8b2a-0cbc629a1fea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.467392] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1108.467392] env[68244]: value = "task-2781037" [ 1108.467392] env[68244]: _type = "Task" [ 1108.467392] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.478553] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781037, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.636323] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.649017] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1108.887145] env[68244]: DEBUG nova.network.neutron [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updating instance_info_cache with network_info: [{"id": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "address": "fa:16:3e:38:7e:40", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b35363e-cd", "ovs_interfaceid": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.982513] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781037, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.155779] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53df48f2-d612-454d-a1fe-1ebf06ea5304 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.162817] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5bac4e-a592-48d4-9f31-9f7fbd150b37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.193427] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71a4e87-4108-434f-b760-1f3c2e559cac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.196879] env[68244]: DEBUG nova.compute.manager [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1109.197083] env[68244]: DEBUG nova.compute.manager [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing instance network info cache due to event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1109.197285] env[68244]: DEBUG oslo_concurrency.lockutils [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] Acquiring lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.197428] env[68244]: DEBUG oslo_concurrency.lockutils [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] Acquired lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.197583] env[68244]: DEBUG nova.network.neutron [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.208689] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fb70e3-2c01-43f9-ae6c-6b7ea7b491d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.224728] env[68244]: DEBUG nova.compute.provider_tree [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.395809] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing 
lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.396159] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Instance network_info: |[{"id": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "address": "fa:16:3e:38:7e:40", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b35363e-cd", "ovs_interfaceid": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1109.396542] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Acquired lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.396871] env[68244]: DEBUG nova.network.neutron [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Refreshing network info cache for port 6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.398124] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:7e:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b35363e-cd67-4df3-a6e4-3fa00355ae06', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.409101] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1109.410468] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1109.410728] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96b5d650-278c-4f6c-8211-f61cfb3b1adf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.434706] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.434706] env[68244]: value = "task-2781038" [ 1109.434706] env[68244]: _type = "Task" [ 1109.434706] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.440335] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.440335] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.440335] env[68244]: DEBUG nova.network.neutron [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1109.445744] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781038, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.479698] env[68244]: DEBUG oslo_vmware.api [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781037, 'name': PowerOnVM_Task, 'duration_secs': 0.56007} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.480316] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.480523] env[68244]: INFO nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Took 7.97 seconds to spawn the instance on the hypervisor. 
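The recurring "Waiting for the task" / "progress is N%" / "completed successfully" triplets above (task-2781036, task-2781037, task-2781038) come from oslo.vmware's wait_for_task/_poll_task loop around vCenter tasks. A self-contained sketch of that polling pattern, using a simulated task instead of a real oslo_vmware.api.VMwareAPISession; read_task_info and the fake task states below are assumptions for illustration only:

import time

# Simulated TaskInfo snapshots: queued -> running -> success.
_FAKE_TASK_STATES = iter([
    {"state": "queued", "progress": 0},
    {"state": "running", "progress": 66},
    {"state": "success", "progress": 100, "result": "vm-1234"},
])

def read_task_info(task_id):
    """Stand-in for the PropertyCollector read of the task's TaskInfo."""
    return next(_FAKE_TASK_STATES)

def wait_for_task(task_id, poll_interval=0.1):
    """Poll until the task succeeds or errors, mirroring the log lines above."""
    while True:
        info = read_task_info(task_id)
        if info["state"] == "success":
            print(f"Task {task_id} completed successfully.")
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info['progress']}%.")
        time.sleep(poll_interval)

wait_for_task("task-2781038")
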
[ 1109.480708] env[68244]: DEBUG nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1109.481524] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6bde07-698a-4492-90d9-786b4aefd79f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.728721] env[68244]: DEBUG nova.scheduler.client.report [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.926377] env[68244]: DEBUG nova.network.neutron [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updated VIF entry in instance network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.926643] env[68244]: DEBUG nova.network.neutron [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [{"id": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "address": "fa:16:3e:fe:07:19", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cbfb410-db", "ovs_interfaceid": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.947366] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781038, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.002976] env[68244]: INFO nova.compute.manager [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Took 22.52 seconds to build instance. [ 1110.180820] env[68244]: DEBUG nova.network.neutron [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.235778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.236358] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.239360] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.790s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.239825] env[68244]: DEBUG nova.objects.instance [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lazy-loading 'resources' on Instance uuid b84c2c08-651a-407d-89dd-177bc5d90313 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.289688] env[68244]: DEBUG nova.network.neutron [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updated VIF entry in instance network info cache for port 6b35363e-cd67-4df3-a6e4-3fa00355ae06. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.290099] env[68244]: DEBUG nova.network.neutron [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updating instance_info_cache with network_info: [{"id": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "address": "fa:16:3e:38:7e:40", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b35363e-cd", "ovs_interfaceid": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.423110] env[68244]: DEBUG nova.compute.manager [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1110.423353] env[68244]: DEBUG oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.423821] env[68244]: DEBUG 
oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.424028] env[68244]: DEBUG oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.424211] env[68244]: DEBUG nova.compute.manager [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] No waiting events found dispatching network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1110.424395] env[68244]: WARNING nova.compute.manager [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received unexpected event network-vif-plugged-9bcf5292-c53f-42bf-97f1-7f616748f9ed for instance with vm_state shelved_offloaded and task_state spawning. [ 1110.424556] env[68244]: DEBUG nova.compute.manager [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1110.424715] env[68244]: DEBUG nova.compute.manager [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing instance network info cache due to event network-changed-9bcf5292-c53f-42bf-97f1-7f616748f9ed. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1110.425961] env[68244]: DEBUG oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Acquiring lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.429146] env[68244]: DEBUG oslo_concurrency.lockutils [req-63767514-9b4b-4501-9686-3a531ef83165 req-c8f3547e-7779-41b9-a635-4d3da5b05073 service nova] Releasing lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.447040] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781038, 'name': CreateVM_Task, 'duration_secs': 0.634608} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.447274] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1110.448019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.448224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.448541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1110.448840] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae4061d-4eac-4cd2-9abc-55f7887342e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.453453] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1110.453453] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e18fb1-4d40-c21d-2c9f-273ef806774e" [ 1110.453453] env[68244]: _type = "Task" [ 1110.453453] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.462706] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e18fb1-4d40-c21d-2c9f-273ef806774e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.508341] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f15f8097-54a5-4646-ab22-d67ac7978cef tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.037s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.683267] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.685702] env[68244]: DEBUG oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Acquired lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.685883] env[68244]: DEBUG nova.network.neutron [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Refreshing network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.714716] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='322d4ed8a451069dbd0e3dfbebb85262',container_format='bare',created_at=2025-03-06T03:27:37Z,direct_url=,disk_format='vmdk',id=a370b67b-27dc-4eb4-8e12-ce720e17af0d,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1358959373-shelved',owner='d41b4d274faa4f5a8951d39fa0d0c714',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-03-06T03:27:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.714997] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.715176] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.715363] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 
tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.715510] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.715657] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.715863] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.716030] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.716201] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.716360] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.716528] env[68244]: DEBUG nova.virt.hardware [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.717395] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1412633-4b38-4dcf-9797-bbb6196b6648 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.725347] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e93553c-1dcf-441a-8b55-78f6f1243340 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.739250] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:52:f5:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bcf5292-c53f-42bf-97f1-7f616748f9ed', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.746535] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.749891] env[68244]: DEBUG nova.compute.utils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1110.751138] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.751588] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1110.751755] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1110.756020] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f020d71d-c0f9-4e10-8769-2c5b22c603cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.773709] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.773709] env[68244]: value = "task-2781040" [ 1110.773709] env[68244]: _type = "Task" [ 1110.773709] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.784117] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781040, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.792821] env[68244]: DEBUG oslo_concurrency.lockutils [req-64515a7e-f67a-461f-be4b-e7d918accb7d req-cb767b07-5661-46ff-8bc0-7f67662cee2b service nova] Releasing lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.815352] env[68244]: DEBUG nova.policy [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f93ab312c1f44d7877c43a7b101cb5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4349b19805a8498392649e1b825d5da7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1110.964795] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e18fb1-4d40-c21d-2c9f-273ef806774e, 'name': SearchDatastore_Task, 'duration_secs': 0.032638} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.967591] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.967910] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.968196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.968345] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.968524] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Creating directory with 
path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1110.969754] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4e967ff-9350-4b00-8372-d22205e42025 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.976910] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1110.977109] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1110.977847] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ed07657-c434-4932-8a6f-e8470140a526 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.987100] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1110.987100] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284c0cd-20cc-6f09-9749-380296192161" [ 1110.987100] env[68244]: _type = "Task" [ 1110.987100] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.995481] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284c0cd-20cc-6f09-9749-380296192161, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.029857] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c883b1f7-36cf-4bc7-bcdc-c4a317500b0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.037273] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed65fa08-bd74-49c1-9299-d847be8218e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.082181] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0221c68-03ca-4464-9862-e13ff7dd7abe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.090366] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe3107-4fdd-456b-9939-860f99d5dca8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.107724] env[68244]: DEBUG nova.compute.provider_tree [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.246173] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Successfully created port: 9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1111.268637] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1111.286750] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781040, 'name': CreateVM_Task, 'duration_secs': 0.307237} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.286935] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.287735] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.288151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.288588] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1111.288920] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a338a7f-c9c4-4719-8e02-19e858c70b5d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.293928] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1111.293928] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529cdb91-855a-ce67-2212-b09f73623fa4" [ 1111.293928] env[68244]: _type = "Task" [ 1111.293928] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.303389] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529cdb91-855a-ce67-2212-b09f73623fa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.433986] env[68244]: DEBUG nova.network.neutron [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updated VIF entry in instance network info cache for port 9bcf5292-c53f-42bf-97f1-7f616748f9ed. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1111.434380] env[68244]: DEBUG nova.network.neutron [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [{"id": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "address": "fa:16:3e:52:f5:5c", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bcf5292-c5", "ovs_interfaceid": "9bcf5292-c53f-42bf-97f1-7f616748f9ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.498272] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5284c0cd-20cc-6f09-9749-380296192161, 'name': SearchDatastore_Task, 'duration_secs': 0.008539} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.499155] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-942fc1b5-23a8-458d-97b7-544e8d4d9f9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.504889] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1111.504889] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287ceed-4717-b975-4113-22e669bd1197" [ 1111.504889] env[68244]: _type = "Task" [ 1111.504889] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.514252] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287ceed-4717-b975-4113-22e669bd1197, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.546933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "4eb691f4-567e-412c-ba04-792ee9a21135" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.547224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.547442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.547623] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.547795] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.550104] env[68244]: INFO nova.compute.manager [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Terminating instance [ 1111.613817] env[68244]: DEBUG nova.scheduler.client.report [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.805993] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.806270] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Processing image a370b67b-27dc-4eb4-8e12-ce720e17af0d {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.806514] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.806665] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.806844] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.807110] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-054a508f-4e14-422f-ab7e-e1d5d464f8f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.822388] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.822583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.823319] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ab5de7-919b-48c2-b869-2d7e8bddb15d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.828306] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1111.828306] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52383ff2-fbd1-e7e9-e898-3649ca2185c1" [ 1111.828306] env[68244]: _type = "Task" [ 1111.828306] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.835445] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52383ff2-fbd1-e7e9-e898-3649ca2185c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.937642] env[68244]: DEBUG oslo_concurrency.lockutils [req-4fc26e5e-0b23-4bf1-8cbe-249788659552 req-158f9f95-aafc-4644-878b-0e0b5fe1da62 service nova] Releasing lock "refresh_cache-d46f6695-7a96-4e0b-b43a-236bcb4ec519" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.015448] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5287ceed-4717-b975-4113-22e669bd1197, 'name': SearchDatastore_Task, 'duration_secs': 0.009926} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.015806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.015973] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/91422c89-601c-4e5f-b5b0-fa2639031d3e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.016250] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4d05dda-65b4-45a5-83a2-8b2d575633d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.023329] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1112.023329] env[68244]: value = "task-2781041" [ 1112.023329] env[68244]: _type = "Task" [ 1112.023329] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.030697] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.054645] env[68244]: DEBUG nova.compute.manager [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1112.054866] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.055738] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a756791-4c11-40d2-9291-a41071f89201 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.062541] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.062770] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b98cf64e-87c6-4350-b2fc-0e704845b6de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.068360] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1112.068360] env[68244]: value = "task-2781042" [ 1112.068360] env[68244]: _type = "Task" [ 1112.068360] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.075978] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781042, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.119503] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.880s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.121930] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.744s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.122186] env[68244]: DEBUG nova.objects.instance [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lazy-loading 'resources' on Instance uuid f9f6c504-f140-4c90-994b-d3ec2d148796 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.144012] env[68244]: INFO nova.scheduler.client.report [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Deleted allocations for instance b84c2c08-651a-407d-89dd-177bc5d90313 [ 1112.282208] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1112.312500] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1112.312693] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.312990] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1112.313144] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.313296] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1112.313488] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1112.313741] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1112.313889] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1112.314503] env[68244]: DEBUG 
nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1112.314503] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1112.314503] env[68244]: DEBUG nova.virt.hardware [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1112.315415] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a9ab0f-0be9-40f1-8f89-7a5a530014f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.325856] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12f3f59-a0cc-48ba-b35f-b8f214fa81f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.348799] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1112.349074] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Fetch image to [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61/OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1112.349269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Downloading stream optimized image a370b67b-27dc-4eb4-8e12-ce720e17af0d to [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61/OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61.vmdk on the data store datastore2 as vApp {{(pid=68244) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1112.349438] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Downloading image file data a370b67b-27dc-4eb4-8e12-ce720e17af0d to the ESX as VM named 'OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61' {{(pid=68244) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1112.427398] env[68244]: DEBUG oslo_vmware.rw_handles [None 
req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1112.427398] env[68244]: value = "resgroup-9" [ 1112.427398] env[68244]: _type = "ResourcePool" [ 1112.427398] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1112.427782] env[68244]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4610a111-bfc9-4fea-9800-1e6e2eb80a5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.450037] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1112.450342] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559136', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'name': 'volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df935885-c313-473d-aa3a-ba81aa999554', 'attached_at': '', 'detached_at': '', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'serial': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1112.451846] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20058641-45a7-4f49-8963-41b9cdf1b9d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.456197] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease: (returnval){ [ 1112.456197] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1112.456197] env[68244]: _type = "HttpNfcLease" [ 1112.456197] env[68244]: } obtained for vApp import into resource pool (val){ [ 1112.456197] env[68244]: value = "resgroup-9" [ 1112.456197] env[68244]: _type = "ResourcePool" [ 1112.456197] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1112.456461] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the lease: (returnval){ [ 1112.456461] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1112.456461] env[68244]: _type = "HttpNfcLease" [ 1112.456461] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1112.475362] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f7d56f-55f5-409c-b95c-165bc8aecb96 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.482013] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1112.482013] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1112.482013] env[68244]: _type = "HttpNfcLease" [ 1112.482013] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1112.508812] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a/volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.509254] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-455868cc-a7c2-40a7-a675-3539bcb10f7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.536782] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781041, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.538198] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1112.538198] env[68244]: value = "task-2781044" [ 1112.538198] env[68244]: _type = "Task" [ 1112.538198] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.546908] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781044, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.578275] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781042, 'name': PowerOffVM_Task, 'duration_secs': 0.176208} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.579490] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.579490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.579490] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad58a4c0-6559-4873-8267-0adbcbf08309 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.639883] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.640184] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.640390] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleting the datastore file [datastore1] 4eb691f4-567e-412c-ba04-792ee9a21135 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.640720] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d03b5928-28cd-4006-8c84-86d5179b3fd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.654486] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for the task: (returnval){ [ 1112.654486] env[68244]: value = "task-2781046" [ 1112.654486] env[68244]: _type = "Task" [ 1112.654486] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.657783] env[68244]: DEBUG oslo_concurrency.lockutils [None req-351ae813-07ba-4edf-8287-2fdbf9735412 tempest-ListImageFiltersTestJSON-2040076982 tempest-ListImageFiltersTestJSON-2040076982-project-member] Lock "b84c2c08-651a-407d-89dd-177bc5d90313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.796s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.667678] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781046, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.895090] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec2a5be-2b40-4ea0-bf9d-e42bda00e494 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.900749] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fa941f-6f14-4b24-91cf-9e040814d93d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.945949] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb961d01-9c81-48f2-bf33-455caa803ac9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.954483] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc309ebf-b961-4684-8298-2389cca80aa7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.973553] env[68244]: DEBUG nova.compute.provider_tree [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.976428] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1112.976428] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1112.976428] env[68244]: _type = "HttpNfcLease" [ 1112.976428] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1113.035848] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781041, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537724} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.035848] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/91422c89-601c-4e5f-b5b0-fa2639031d3e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.036300] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.036446] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6879b6ad-a95a-4ffd-9404-449e45c388e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.047344] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781044, 'name': ReconfigVM_Task, 'duration_secs': 0.39784} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.049988] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to attach disk [datastore2] volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a/volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.055108] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1113.055108] env[68244]: value = "task-2781047" [ 1113.055108] env[68244]: _type = "Task" [ 1113.055108] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.055343] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91f39591-bb95-4dc7-bf07-00a67be3f89f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.074673] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781047, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.075882] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1113.075882] env[68244]: value = "task-2781048" [ 1113.075882] env[68244]: _type = "Task" [ 1113.075882] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.083652] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.092279] env[68244]: DEBUG nova.compute.manager [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Received event network-vif-plugged-9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1113.092556] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] Acquiring lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.092800] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.093020] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.093219] env[68244]: DEBUG nova.compute.manager [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] No waiting events found dispatching network-vif-plugged-9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1113.093423] env[68244]: WARNING nova.compute.manager [req-2b4875a6-a34f-417f-bca0-ba3f8767b220 req-a5cc8dae-c6cc-4070-8e34-7e22de165f7e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Received unexpected event network-vif-plugged-9d57c368-4817-44e7-a55f-02a83f75dabc for instance with vm_state building and task_state spawning. [ 1113.168695] env[68244]: DEBUG oslo_vmware.api [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Task: {'id': task-2781046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21773} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.168947] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.169173] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.169354] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.169544] env[68244]: INFO nova.compute.manager [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1113.171785] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.172075] env[68244]: DEBUG nova.compute.manager [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.172183] env[68244]: DEBUG nova.network.neutron [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.195693] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Successfully updated port: 9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1113.468023] env[68244]: DEBUG nova.compute.manager [req-ebb635a6-376a-4570-8b9e-0e45dcaa14d5 req-9c654bc5-32c6-4014-9aec-3f0fd8a37626 service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Received event network-vif-deleted-32c0f429-b5b6-4527-9e8b-a057737135b8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1113.468412] env[68244]: INFO nova.compute.manager [req-ebb635a6-376a-4570-8b9e-0e45dcaa14d5 req-9c654bc5-32c6-4014-9aec-3f0fd8a37626 service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Neutron deleted interface 32c0f429-b5b6-4527-9e8b-a057737135b8; detaching it from the instance and deleting it from the info cache [ 1113.468620] env[68244]: DEBUG nova.network.neutron [req-ebb635a6-376a-4570-8b9e-0e45dcaa14d5 req-9c654bc5-32c6-4014-9aec-3f0fd8a37626 service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.471532] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1113.471532] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1113.471532] env[68244]: _type = "HttpNfcLease" [ 1113.471532] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1113.472114] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1113.472114] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52da9f76-7c87-4035-af2f-f74a89adad67" [ 1113.472114] env[68244]: _type = "HttpNfcLease" [ 1113.472114] env[68244]: }. 
{{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1113.472699] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58a6b41-7b15-4444-89c9-3856b82e84b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.482549] env[68244]: DEBUG nova.scheduler.client.report [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.485720] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1113.485900] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk. {{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1113.553611] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f81cfb8e-1b71-4527-8fee-d7c97153aaca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.575150] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781047, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066293} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.575402] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.576672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a36302-2b11-486a-8623-43dded00b4b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.603753] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/91422c89-601c-4e5f-b5b0-fa2639031d3e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.607780] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c88bf6d4-1c2a-4349-a055-eab6fc32618f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.622518] env[68244]: DEBUG oslo_vmware.api [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781048, 'name': ReconfigVM_Task, 'duration_secs': 0.141266} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.623272] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559136', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'name': 'volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'df935885-c313-473d-aa3a-ba81aa999554', 'attached_at': '', 'detached_at': '', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'serial': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1113.632650] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1113.632650] env[68244]: value = "task-2781049" [ 1113.632650] env[68244]: _type = "Task" [ 1113.632650] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.639453] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781049, 'name': ReconfigVM_Task} progress is 6%. 
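The entries above submit vCenter tasks (ExtendVirtualDisk_Task, ReconfigVM_Task) and then poll them until they read "completed successfully". Below is a minimal sketch of such a poll loop, assuming a hypothetical get_task_state callable that returns (state, progress); it is not the oslo_vmware API itself, only an illustration of the fixed-interval polling that the "_poll_task ... progress is N%" lines record.

import time

# Hypothetical terminal states, modelled on the vSphere TaskInfo states
# that stand behind the "progress is N%" / "completed successfully" lines.
_DONE = {"success", "error"}

def poll_task(get_task_state, interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state.

    get_task_state: hypothetical callable returning (state, progress_percent).
    Returns the final state or raises TimeoutError.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress = get_task_state()
        print(f"progress is {progress}%")      # cf. the _poll_task DEBUG lines
        if state in _DONE:
            return state
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)                   # fixed-interval poll between status reads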
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.704211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.704211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.704211] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.810551] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.811178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.811272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.811525] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.811812] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.815813] env[68244]: INFO nova.compute.manager [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Terminating instance [ 1113.953303] env[68244]: DEBUG nova.network.neutron [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.973295] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36c4bd85-4dc0-4ca9-95de-112950eec88b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.983941] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f2263e-0bee-4c82-8147-e3d0b821446a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.000164] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.002717] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.366s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.002975] env[68244]: DEBUG nova.objects.instance [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 5c4bb8d0-8135-4272-83c2-ef923ac52d4a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.030202] env[68244]: DEBUG nova.compute.manager [req-ebb635a6-376a-4570-8b9e-0e45dcaa14d5 req-9c654bc5-32c6-4014-9aec-3f0fd8a37626 service nova] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Detach interface failed, port_id=32c0f429-b5b6-4527-9e8b-a057737135b8, reason: Instance 4eb691f4-567e-412c-ba04-792ee9a21135 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1114.031498] env[68244]: INFO nova.scheduler.client.report [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Deleted allocations for instance f9f6c504-f140-4c90-994b-d3ec2d148796 [ 1114.140874] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781049, 'name': ReconfigVM_Task, 'duration_secs': 0.501049} completed successfully. 
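The lockutils entries above follow a fixed pattern: 'Acquiring lock ... by ...', then 'acquired ... waited Ns', then '"released" ... held Ns'. The sketch below reproduces that bookkeeping with a process-local named lock; it is illustrative only, using plain threading and print calls, and does not attempt the fair or external-lock semantics that oslo_concurrency provides.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per name, e.g. "compute_resources"

@contextmanager
def named_lock(name, owner):
    """Acquire a process-local lock by name and report wait/hold times."""
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    with _locks[name]:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_from
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')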
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.141516] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/91422c89-601c-4e5f-b5b0-fa2639031d3e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.142898] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed67502f-33ea-4bc4-9ac6-f13013ac9cf9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.150186] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1114.150186] env[68244]: value = "task-2781050" [ 1114.150186] env[68244]: _type = "Task" [ 1114.150186] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.165211] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781050, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.250887] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1114.323389] env[68244]: DEBUG nova.compute.manager [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.323631] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.324544] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc7bf8b-e701-414b-a504-8296424bc753 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.333529] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.333864] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e089b01-c1e6-4dcd-bd96-271fac9f461c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.343540] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1114.343540] env[68244]: value = "task-2781051" [ 1114.343540] env[68244]: _type = "Task" [ 1114.343540] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.354632] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.442899] env[68244]: DEBUG nova.network.neutron [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updating instance_info_cache with network_info: [{"id": "9d57c368-4817-44e7-a55f-02a83f75dabc", "address": "fa:16:3e:f1:05:a9", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d57c368-48", "ovs_interfaceid": "9d57c368-4817-44e7-a55f-02a83f75dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.455836] env[68244]: INFO nova.compute.manager [-] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Took 1.28 seconds to deallocate network for instance. [ 1114.548581] env[68244]: DEBUG oslo_concurrency.lockutils [None req-82cd2c33-9229-488a-9cd3-8dd16f658f28 tempest-ServerAddressesNegativeTestJSON-1435391658 tempest-ServerAddressesNegativeTestJSON-1435391658-project-member] Lock "f9f6c504-f140-4c90-994b-d3ec2d148796" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.780s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.661035] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781050, 'name': Rename_Task, 'duration_secs': 0.171583} completed successfully. 
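Interleaved through this section is the teardown sequence recorded for the two instances being destroyed: power off the VM, unregister it, delete its datastore contents, then deallocate the network. The sketch below captures only that ordering; the four callables are hypothetical stand-ins for the vmops and neutron calls named in the log.

def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
    """Sketch of the teardown order traced in the log for instance deletes."""
    power_off()               # "Powering off the VM" -> PowerOffVM_Task
    unregister()              # "Unregistering the VM" -> UnregisterVM
    delete_datastore_dir()    # "Deleting the datastore file [datastore...]" -> DeleteDatastoreFile_Task
    deallocate_network()      # "Deallocating network for instance" -> deallocate_for_instance()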
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.663888] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1114.666340] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa6f98db-98e0-4d57-a1ff-279a11862407 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.672878] env[68244]: DEBUG nova.objects.instance [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'flavor' on Instance uuid df935885-c313-473d-aa3a-ba81aa999554 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.677306] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1114.677306] env[68244]: value = "task-2781052" [ 1114.677306] env[68244]: _type = "Task" [ 1114.677306] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.686956] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.814296] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ce7172-3808-4d3a-b156-c75cb574795c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.819073] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1114.819345] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1114.820521] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1884aa4d-dc4b-4afe-84f2-f3377dc7348c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.829845] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0258d236-6fe9-49e4-8e87-d6981e99f0d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.833145] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1114.833360] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1114.833920] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-05161524-a360-4a0d-ae54-e26785b42bbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.869007] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db25756-29a1-4b13-8676-b0cba32a624d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.877417] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781051, 'name': PowerOffVM_Task, 'duration_secs': 0.243863} completed successfully. 
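The rw_handles entries trace the image-import upload: create an HttpNfcLease, wait for it to become ready, read the VMDK URL from the lease info, stream the image bytes over HTTP while reporting lease progress, then complete the lease. A sketch of that ordering follows; every callable it takes is a hypothetical stand-in, and only the sequence itself is taken from the log.

def upload_vmdk(create_lease, wait_until_ready, find_vmdk_url,
                open_write_handle, image_chunks, report_progress, complete_lease):
    """Sketch of the import-vApp upload sequence traced in the log."""
    lease = create_lease()                  # _create_import_vapp_lease
    wait_until_ready(lease)                 # "Lease: ... is ready."
    url = find_vmdk_url(lease)              # "Found VMDK URL: ... from lease info."
    written = 0
    with open_write_handle(url) as handle:  # "Creating HTTP connection to write to file ..."
        for chunk in image_chunks():
            handle.write(chunk)
            written += len(chunk)
            report_progress(lease, written) # HttpNfcLease.HttpNfcLeaseProgress
    complete_lease(lease)                   # "Releasing lease ..." -> HttpNfcLeaseComplete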
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.880036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.880338] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.880660] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b3749c2-2a7a-4f58-b7ca-0e4955409e2e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.883295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ae40bf-5e51-492c-9b7c-d034afe4c4b9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.897967] env[68244]: DEBUG nova.compute.provider_tree [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.945903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.946261] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Instance network_info: |[{"id": "9d57c368-4817-44e7-a55f-02a83f75dabc", "address": "fa:16:3e:f1:05:a9", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d57c368-48", "ovs_interfaceid": "9d57c368-4817-44e7-a55f-02a83f75dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1114.946729] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:05:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d57c368-4817-44e7-a55f-02a83f75dabc', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.954925] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1114.955235] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1114.955485] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9414e945-b709-4fe2-af65-5db2a11b535b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.972770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.973563] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.974009] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.974194] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleting the datastore file [datastore2] ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.974950] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9cc8e92-a66e-42bb-aa1a-7b9890048c3e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.979180] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 
1114.979180] env[68244]: value = "task-2781054" [ 1114.979180] env[68244]: _type = "Task" [ 1114.979180] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.983681] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for the task: (returnval){ [ 1114.983681] env[68244]: value = "task-2781055" [ 1114.983681] env[68244]: _type = "Task" [ 1114.983681] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.990673] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781054, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.995357] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781055, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.028760] env[68244]: DEBUG oslo_vmware.rw_handles [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252a4d9-1c0c-0982-6171-680f1ba9025a/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1115.029079] env[68244]: INFO nova.virt.vmwareapi.images [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Downloaded image file data a370b67b-27dc-4eb4-8e12-ce720e17af0d [ 1115.030343] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6b6b4a-78c5-4149-9afd-4c75d0eadb85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.049271] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5902a578-20b5-4360-a697-4dfbf0bcf4fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.075750] env[68244]: INFO nova.virt.vmwareapi.images [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] The imported VM was unregistered [ 1115.078348] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1115.078592] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.078909] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb340444-5780-4a7f-b5bf-9e80638425ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.088892] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.089123] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61/OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61.vmdk to [datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk. {{(pid=68244) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1115.089383] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6e118a7d-4768-4308-ab75-8197a4ac64ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.096013] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1115.096013] env[68244]: value = "task-2781057" [ 1115.096013] env[68244]: _type = "Task" [ 1115.096013] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.106590] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.125787] env[68244]: DEBUG nova.compute.manager [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Received event network-changed-9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1115.126062] env[68244]: DEBUG nova.compute.manager [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Refreshing instance network info cache due to event network-changed-9d57c368-4817-44e7-a55f-02a83f75dabc. 
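Once the upload finishes, the imported OSTACK_IMG_* VM is unregistered and its disk is cached: a devstack-image-cache_base/<image-id> directory is created and the temporary VMDK is moved into it. The sketch below mirrors those two steps with local filesystem paths for illustration; the real operations go through the vCenter FileManager.MakeDirectory and VirtualDiskManager.MoveVirtualDisk_Task calls shown above.

import shutil
from pathlib import Path

def cache_imported_disk(cache_root: Path, image_id: str, imported_vmdk: Path) -> Path:
    """Move an imported disk into the image cache (local-path illustration)."""
    target_dir = cache_root / image_id              # e.g. devstack-image-cache_base/<image-id>
    target_dir.mkdir(parents=True, exist_ok=True)   # "Creating directory with path ..."
    target = target_dir / f"{image_id}.vmdk"
    shutil.move(str(imported_vmdk), target)         # "Moving virtual disk from ... to ..."
    return target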
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1115.126311] env[68244]: DEBUG oslo_concurrency.lockutils [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] Acquiring lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.126701] env[68244]: DEBUG oslo_concurrency.lockutils [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] Acquired lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.126950] env[68244]: DEBUG nova.network.neutron [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Refreshing network info cache for port 9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.180963] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3a2e5133-65f3-4284-a3c5-a9391f0692a4 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.873s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.195233] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781052, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.404028] env[68244]: DEBUG nova.scheduler.client.report [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.497793] env[68244]: DEBUG oslo_vmware.api [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Task: {'id': task-2781055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193799} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.503778] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.503778] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.503778] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.503778] env[68244]: INFO nova.compute.manager [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1115.503778] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.503778] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781054, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.503778] env[68244]: DEBUG nova.compute.manager [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.503778] env[68244]: DEBUG nova.network.neutron [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.611457] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.696202] env[68244]: DEBUG oslo_vmware.api [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781052, 'name': PowerOnVM_Task, 'duration_secs': 0.611328} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.696571] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.696830] env[68244]: INFO nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Took 8.79 seconds to spawn the instance on the hypervisor. [ 1115.697076] env[68244]: DEBUG nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.698656] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55de09d4-19b0-4a65-82c9-999a6a1d47c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.909162] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.919266] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.946s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.920214] env[68244]: DEBUG nova.objects.instance [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lazy-loading 'resources' on Instance uuid 4eb691f4-567e-412c-ba04-792ee9a21135 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.946312] env[68244]: INFO nova.scheduler.client.report [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 5c4bb8d0-8135-4272-83c2-ef923ac52d4a [ 1115.995574] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781054, 'name': CreateVM_Task, 'duration_secs': 0.580907} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.003135] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1116.008740] env[68244]: DEBUG nova.compute.manager [req-d74741ad-0b39-4dc8-bbad-1eb249a297b1 req-791e8bde-8184-42bf-9f7e-63fce5df1a04 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Received event network-vif-deleted-abbd3e34-9461-4503-86ee-598fe02a65d3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1116.008970] env[68244]: INFO nova.compute.manager [req-d74741ad-0b39-4dc8-bbad-1eb249a297b1 req-791e8bde-8184-42bf-9f7e-63fce5df1a04 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Neutron deleted interface abbd3e34-9461-4503-86ee-598fe02a65d3; detaching it from the instance and deleting it from the info cache [ 1116.009175] env[68244]: DEBUG nova.network.neutron [req-d74741ad-0b39-4dc8-bbad-1eb249a297b1 req-791e8bde-8184-42bf-9f7e-63fce5df1a04 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.011460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.011637] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.011933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1116.014895] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a079c143-1cbb-473c-bcf0-fbbdaa2ebfcd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.022169] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1116.022169] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521e6800-f81f-ce72-168b-a4765c5cff6c" [ 1116.022169] env[68244]: _type = "Task" [ 1116.022169] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.032958] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521e6800-f81f-ce72-168b-a4765c5cff6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.112956] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.227643] env[68244]: INFO nova.compute.manager [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Took 24.39 seconds to build instance. [ 1116.257532] env[68244]: DEBUG nova.network.neutron [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updated VIF entry in instance network info cache for port 9d57c368-4817-44e7-a55f-02a83f75dabc. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.258255] env[68244]: DEBUG nova.network.neutron [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updating instance_info_cache with network_info: [{"id": "9d57c368-4817-44e7-a55f-02a83f75dabc", "address": "fa:16:3e:f1:05:a9", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d57c368-48", "ovs_interfaceid": "9d57c368-4817-44e7-a55f-02a83f75dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.400637] env[68244]: DEBUG nova.network.neutron [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.455618] env[68244]: DEBUG oslo_concurrency.lockutils [None req-49d024d7-bac0-4417-bdbc-1be6e6b07716 tempest-ServersTestJSON-1639636095 
tempest-ServersTestJSON-1639636095-project-member] Lock "5c4bb8d0-8135-4272-83c2-ef923ac52d4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.591s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.514994] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-371aa6c3-fe57-4db0-8208-e83481ec166f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.537564] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5735b84f-3c99-41d7-b211-ffff046b28e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.557088] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521e6800-f81f-ce72-168b-a4765c5cff6c, 'name': SearchDatastore_Task, 'duration_secs': 0.091485} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.557969] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.558236] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.558478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.558625] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.558807] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1116.559154] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48732030-181f-45d3-a17e-51dd839bce9a {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.589680] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1116.589923] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1116.590861] env[68244]: DEBUG nova.compute.manager [req-d74741ad-0b39-4dc8-bbad-1eb249a297b1 req-791e8bde-8184-42bf-9f7e-63fce5df1a04 service nova] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Detach interface failed, port_id=abbd3e34-9461-4503-86ee-598fe02a65d3, reason: Instance ed5b8ba3-c8f0-468f-85d1-f36179bfef32 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1116.594068] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2b1492a-39e6-4483-a969-92309e650dce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.602863] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1116.602863] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525299b4-8059-b4c2-cba4-05b2f20ce443" [ 1116.602863] env[68244]: _type = "Task" [ 1116.602863] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.609711] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.618870] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525299b4-8059-b4c2-cba4-05b2f20ce443, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.732206] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7d78c88d-5e43-4d16-b0fd-5b58a53f3660 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.905s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.757477] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4a10ef-d41b-402c-b69f-703836850fe7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.761137] env[68244]: DEBUG oslo_concurrency.lockutils [req-8140bd44-13aa-4230-9749-976c0106e55f req-5eec88e8-7a73-4eec-a3ae-441df5acd24e service nova] Releasing lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.766948] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44fdf32-7eac-4d81-916a-556accf75065 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.809480] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec7763d-8001-442d-a7e4-7bf2c7368014 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.820569] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88c4fe2-cf3b-40f3-ab0f-894ffe96fa8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.838588] env[68244]: DEBUG nova.compute.provider_tree [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.862341] env[68244]: DEBUG nova.compute.manager [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1116.905616] env[68244]: INFO nova.compute.manager [-] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Took 1.40 seconds to deallocate network for instance. [ 1117.120950] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525299b4-8059-b4c2-cba4-05b2f20ce443, 'name': SearchDatastore_Task, 'duration_secs': 0.087492} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.124843] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.125136] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-700196fa-e2c5-4eba-9324-16764e6b5fdc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.132077] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1117.132077] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285881f-7d6a-4d5f-06d2-8234d4500708" [ 1117.132077] env[68244]: _type = "Task" [ 1117.132077] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.142142] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285881f-7d6a-4d5f-06d2-8234d4500708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.342763] env[68244]: DEBUG nova.scheduler.client.report [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.388127] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.418778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.451382] env[68244]: INFO nova.compute.manager [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 
91422c89-601c-4e5f-b5b0-fa2639031d3e] Rescuing [ 1117.452041] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.452887] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.452887] env[68244]: DEBUG nova.network.neutron [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.612262] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781057, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.485245} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.612559] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61/OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61.vmdk to [datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk. [ 1117.612747] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Cleaning up location [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1117.612913] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_f53f13bc-30d7-4933-af25-23cddac59a61 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.613183] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5495734-4cd5-4384-8c4f-a7a5564781ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.621192] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1117.621192] env[68244]: value = "task-2781058" [ 1117.621192] env[68244]: _type = "Task" [ 1117.621192] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.629446] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.641359] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5285881f-7d6a-4d5f-06d2-8234d4500708, 'name': SearchDatastore_Task, 'duration_secs': 0.083145} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.641615] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.641880] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 10e67250-5ddc-430d-aac7-4e6bae0778e5/10e67250-5ddc-430d-aac7-4e6bae0778e5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1117.642385] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7bf1439-cde7-4bc9-a0e0-12478ec34e6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.649524] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1117.649524] env[68244]: value = "task-2781059" [ 1117.649524] env[68244]: _type = "Task" [ 1117.649524] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.658271] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781059, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.716797] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "d4fd9092-9081-4be0-b33d-c175be24f12e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.717376] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.850700] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.862243] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.469s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.940259] env[68244]: INFO nova.scheduler.client.report [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Deleted allocations for instance 4eb691f4-567e-412c-ba04-792ee9a21135 [ 1118.132137] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04021} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.132641] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.132955] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.133347] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk to [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.136065] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa62971b-54ef-4571-ab86-974087a92ed0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.142019] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1118.142019] env[68244]: value = "task-2781060" [ 1118.142019] env[68244]: _type = "Task" [ 1118.142019] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.151357] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.159772] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464588} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.160250] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 10e67250-5ddc-430d-aac7-4e6bae0778e5/10e67250-5ddc-430d-aac7-4e6bae0778e5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1118.160637] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.161025] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2edcaef6-2da9-4ba3-a819-ba7f305173bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.167220] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1118.167220] env[68244]: value = "task-2781061" [ 1118.167220] env[68244]: _type = "Task" [ 1118.167220] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.176878] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.220657] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1118.234447] env[68244]: DEBUG nova.network.neutron [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updating instance_info_cache with network_info: [{"id": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "address": "fa:16:3e:38:7e:40", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b35363e-cd", "ovs_interfaceid": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.364316] env[68244]: INFO nova.compute.claims [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.426658] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.428179] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.448426] env[68244]: DEBUG oslo_concurrency.lockutils [None req-18a7e4b3-5878-4941-95b9-d794a4ed5436 tempest-ServerDiskConfigTestJSON-2120003706 tempest-ServerDiskConfigTestJSON-2120003706-project-member] Lock "4eb691f4-567e-412c-ba04-792ee9a21135" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.901s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.652478] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': 
task-2781060, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.680719] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064907} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.681020] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1118.681877] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba6702f-20ca-4c4b-a017-cdb96ac3ebc7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.706855] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 10e67250-5ddc-430d-aac7-4e6bae0778e5/10e67250-5ddc-430d-aac7-4e6bae0778e5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.707277] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b515cfe-9c54-4678-a282-610a98b2b25c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.731390] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1118.731390] env[68244]: value = "task-2781062" [ 1118.731390] env[68244]: _type = "Task" [ 1118.731390] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.737718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.743919] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781062, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.752550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.871699] env[68244]: INFO nova.compute.resource_tracker [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating resource usage from migration fdae44ec-31ae-4e7f-8b05-aba60785fd58 [ 1118.931255] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1119.148399] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff648ed-8678-4953-816d-2387672b237c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.155223] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781060, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.161225] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725eecf2-b390-462c-a4f5-0219eb63b6ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.196918] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948ac9e2-e30e-48cc-bc00-ff2d0facb8b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.205224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd139c44-ea4c-46f0-a716-e0853d927dc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.222242] env[68244]: DEBUG nova.compute.provider_tree [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.242382] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 
tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781062, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.460204] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.650793] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781060, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.742077] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781062, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.750646] env[68244]: ERROR nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [req-01884811-8f94-4757-841f-135928cd345c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-01884811-8f94-4757-841f-135928cd345c"}]} [ 1119.770831] env[68244]: DEBUG nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1119.787225] env[68244]: DEBUG nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1119.787225] env[68244]: DEBUG nova.compute.provider_tree [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.803781] env[68244]: DEBUG nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1119.825431] env[68244]: DEBUG nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1120.154259] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781060, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.172962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6650b2ff-dd64-4f1f-88f5-9513ec875b5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.180655] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bd1023-9c2b-4bb2-876b-651dc152a1c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.217633] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979eccbc-3f88-4727-a262-860f28621540 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.229470] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93c879f-7158-45ca-b2e9-96af406fb644 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.245757] env[68244]: DEBUG nova.compute.provider_tree [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.256197] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781062, 'name': ReconfigVM_Task, 'duration_secs': 1.073165} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.256197] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 10e67250-5ddc-430d-aac7-4e6bae0778e5/10e67250-5ddc-430d-aac7-4e6bae0778e5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.256769] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39cfc7d7-3f2c-44cb-9dd6-72ddbd6f342f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.266497] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1120.266497] env[68244]: value = "task-2781063" [ 1120.266497] env[68244]: _type = "Task" [ 1120.266497] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.278607] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781063, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.295702] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.295702] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4629edfd-0fdc-4b23-93a7-bb1049ba9846 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.307026] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1120.307026] env[68244]: value = "task-2781064" [ 1120.307026] env[68244]: _type = "Task" [ 1120.307026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.317206] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.653685] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781060, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.376431} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.653685] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a370b67b-27dc-4eb4-8e12-ce720e17af0d/a370b67b-27dc-4eb4-8e12-ce720e17af0d.vmdk to [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.654291] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbe4799-635e-4692-b39d-849d5c9065a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.677862] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.678312] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3399630-dfe3-4fdf-8aa7-869a08b63bb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.698899] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1120.698899] env[68244]: value = "task-2781065" [ 1120.698899] env[68244]: _type = "Task" [ 1120.698899] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.708561] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.775821] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781063, 'name': Rename_Task, 'duration_secs': 0.305698} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.776630] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1120.776931] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c128f96a-2852-4835-b616-89e74259526b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.779117] env[68244]: DEBUG nova.scheduler.client.report [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1120.779359] env[68244]: DEBUG nova.compute.provider_tree [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 140 to 141 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1120.779535] env[68244]: DEBUG nova.compute.provider_tree [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.788131] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1120.788131] env[68244]: value = "task-2781066" [ 1120.788131] env[68244]: _type = "Task" [ 1120.788131] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.795913] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781066, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.816872] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781064, 'name': PowerOffVM_Task, 'duration_secs': 0.406638} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.817193] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1120.818778] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cd1d7e-73b2-4cb0-a77a-6868a3b400da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.839110] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416e79ae-5c90-4513-94db-62ce3700a353 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.873249] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.873563] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-871eeeac-2950-496b-8889-e24f8a6678fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.879800] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1120.879800] env[68244]: value = "task-2781067" [ 1120.879800] env[68244]: _type = "Task" [ 1120.879800] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.887380] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781067, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.209901] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781065, 'name': ReconfigVM_Task, 'duration_secs': 0.281705} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.210282] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfigured VM instance instance-00000034 to attach disk [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519/d46f6695-7a96-4e0b-b43a-236bcb4ec519.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.211793] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encrypted': False, 'size': 0, 'boot_index': 0, 'device_type': 'disk', 'guest_format': None, 'disk_bus': None, 'device_name': '/dev/sda', 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'image_id': '9aa0b4d1-af1b-4141-9ca6-95525b722d7e'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'boot_index': None, 'guest_format': None, 'attachment_id': 'a5a1b442-e6dc-4561-8ac5-b0af986aaf44', 'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559123', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'name': 'volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd46f6695-7a96-4e0b-b43a-236bcb4ec519', 'attached_at': '', 'detached_at': '', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'serial': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb'}, 'volume_type': None}], 'swap': None} {{(pid=68244) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1121.211992] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1121.212235] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559123', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'name': 'volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd46f6695-7a96-4e0b-b43a-236bcb4ec519', 'attached_at': '', 'detached_at': '', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'serial': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1121.213091] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa998cab-7443-40b3-8f89-2199696b3c15 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.229593] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3103c327-1af3-40c6-b1ac-56325130f67d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.257131] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb/volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.257483] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c353a709-ca51-45e2-ba60-0230880fabfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.276024] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1121.276024] env[68244]: value = "task-2781068" [ 1121.276024] env[68244]: _type = "Task" [ 1121.276024] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.284551] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.427s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.284758] env[68244]: INFO nova.compute.manager [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Migrating [ 1121.291424] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781068, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.295666] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.877s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.295842] env[68244]: DEBUG nova.objects.instance [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lazy-loading 'resources' on Instance uuid ed5b8ba3-c8f0-468f-85d1-f36179bfef32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.311468] env[68244]: DEBUG oslo_vmware.api [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781066, 'name': PowerOnVM_Task, 'duration_secs': 0.471267} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.312121] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1121.312121] env[68244]: INFO nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Took 9.03 seconds to spawn the instance on the hypervisor. 
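The `Lock "compute_resources" ... acquired ... waited 3.877s` and `"released" ... held 3.427s` entries above are emitted by oslo.concurrency's named-lock wrapper, which Nova's resource tracker uses to serialize claims against a compute node. As a minimal, hedged sketch of that pattern only (the decorated function and its body are illustrative placeholders, not Nova's actual resource-tracker code; only the lockutils calls are real API):

```python
# Minimal illustration of the oslo.concurrency lock pattern behind the
# "compute_resources" acquired/released log lines. claim_resources() is a
# hypothetical placeholder; only the lockutils API usage is real.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid, vcpus, memory_mb):
    """Pretend resource claim, serialized on the 'compute_resources' lock."""
    # Everything in here runs with the named lock held. The library's wrapper
    # logs how long each caller waited for and then held the lock, which is
    # where the "waited N.NNNs" / "held N.NNNs" figures in the log come from.
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}


# The same named lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section
```

The point of the named lock is visible in the log itself: the resize claim, the usage update and the instance claim all contend for `compute_resources`, so their wait/hold times show exactly how long the resource tracker was serialized.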
[ 1121.312247] env[68244]: DEBUG nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.313384] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c65082-5ed8-427d-bd16-103892494a79 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.390478] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1121.390756] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1121.392032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.392032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.392032] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1121.392032] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccf3ffa3-0ec9-4840-905d-ecff6adf1c0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.401679] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1121.402171] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1121.402810] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0b4d59a-b06d-4fb6-bd45-891c19abce08 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.408706] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1121.408706] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52426af1-0b82-cdd1-0e85-980321053806" [ 1121.408706] env[68244]: _type = "Task" [ 1121.408706] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.419277] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52426af1-0b82-cdd1-0e85-980321053806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.787123] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781068, 'name': ReconfigVM_Task, 'duration_secs': 0.295087} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.787561] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfigured VM instance instance-00000034 to attach disk [datastore2] volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb/volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.792320] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7118f98e-22db-4717-b594-f9883e66e7d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.806302] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.806464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.806628] env[68244]: DEBUG nova.network.neutron [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 
df935885-c313-473d-aa3a-ba81aa999554] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.811192] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1121.811192] env[68244]: value = "task-2781069" [ 1121.811192] env[68244]: _type = "Task" [ 1121.811192] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.819026] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.837624] env[68244]: INFO nova.compute.manager [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Took 26.55 seconds to build instance. [ 1121.919369] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52426af1-0b82-cdd1-0e85-980321053806, 'name': SearchDatastore_Task, 'duration_secs': 0.009644} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.923947] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86d81266-9879-41c1-8e43-e59a89b7f0bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.933025] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1121.933025] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520402e8-8941-f3d5-f02f-a113828e3787" [ 1121.933025] env[68244]: _type = "Task" [ 1121.933025] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.940292] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520402e8-8941-f3d5-f02f-a113828e3787, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.087577] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb6fd23-32a9-4594-94fa-4c8312e3c9e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.096872] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf7212c-6600-4989-82cf-a016f4aabdf3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.139019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41c69ec-a80d-4b4f-8929-0e3b8cb24e4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.146025] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb55b678-4c14-4619-b19a-40018b1f9342 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.163918] env[68244]: DEBUG nova.compute.provider_tree [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.321943] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781069, 'name': ReconfigVM_Task, 'duration_secs': 0.139403} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.322162] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559123', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'name': 'volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd46f6695-7a96-4e0b-b43a-236bcb4ec519', 'attached_at': '', 'detached_at': '', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'serial': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1122.322752] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ff1b761-ff1d-4bfe-8339-df67ebdee7bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.330615] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1122.330615] env[68244]: value = "task-2781070" [ 1122.330615] env[68244]: _type = "Task" [ 1122.330615] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.339638] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d33ca53f-8d0b-41ab-8dae-1e92149d5037 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.062s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.339837] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781070, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.407636] env[68244]: DEBUG nova.compute.manager [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Received event network-changed-9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1122.407842] env[68244]: DEBUG nova.compute.manager [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Refreshing instance network info cache due to event network-changed-9d57c368-4817-44e7-a55f-02a83f75dabc. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1122.408101] env[68244]: DEBUG oslo_concurrency.lockutils [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] Acquiring lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.408260] env[68244]: DEBUG oslo_concurrency.lockutils [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] Acquired lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.408423] env[68244]: DEBUG nova.network.neutron [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Refreshing network info cache for port 9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.442913] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520402e8-8941-f3d5-f02f-a113828e3787, 'name': SearchDatastore_Task, 'duration_secs': 0.010241} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.443288] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.443647] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. {{(pid=68244) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1122.444486] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75fcfeaa-b6a3-4c3d-bb4b-47d14d5f745a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.453803] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1122.453803] env[68244]: value = "task-2781071" [ 1122.453803] env[68244]: _type = "Task" [ 1122.453803] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.470534] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781071, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.589902] env[68244]: DEBUG nova.network.neutron [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.668417] env[68244]: DEBUG nova.scheduler.client.report [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.841492] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781070, 'name': Rename_Task, 'duration_secs': 0.156608} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.842156] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.842156] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-645816cf-f4d3-47a6-bd09-f2ba47af41c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.851582] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1122.851582] env[68244]: value = "task-2781072" [ 1122.851582] env[68244]: _type = "Task" [ 1122.851582] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.865625] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.964659] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781071, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.094475] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.176631] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.182173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.430s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.189477] env[68244]: INFO nova.compute.claims [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.211714] env[68244]: INFO nova.scheduler.client.report [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Deleted allocations for instance ed5b8ba3-c8f0-468f-85d1-f36179bfef32 [ 1123.305634] env[68244]: DEBUG nova.network.neutron [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updated VIF entry in instance network info cache for port 9d57c368-4817-44e7-a55f-02a83f75dabc. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.306029] env[68244]: DEBUG nova.network.neutron [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updating instance_info_cache with network_info: [{"id": "9d57c368-4817-44e7-a55f-02a83f75dabc", "address": "fa:16:3e:f1:05:a9", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d57c368-48", "ovs_interfaceid": "9d57c368-4817-44e7-a55f-02a83f75dabc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.365118] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781072, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.465056] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541974} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.465344] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk. 
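The recurring `Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }`, `progress is N%`, and `completed successfully` triplets above (for PowerOnVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, and so on) come from polling a vCenter task object until it reaches a terminal state. The following is a simplified, hypothetical sketch of that loop's general shape, not oslo.vmware's actual implementation; `get_task_info()` is a stand-in for a vSphere TaskInfo lookup and is not a real library call:

```python
# Simplified sketch of the task-polling loop behind the repeated
# "Waiting for the task ... progress is N% ... completed successfully"
# log lines. get_task_info() is a hypothetical stand-in for fetching a
# vSphere TaskInfo object (e.g. via one property-collector round trip).
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in the 'error' state."""


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it succeeds, raising on error."""
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info                          # -> "completed successfully"
        if info['state'] == 'error':
            raise TaskFailed(info.get('error'))
        # Still queued or running: report progress and try again.
        print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

Each poll iteration corresponds to one of the `progress is N%` lines in the log, and the `duration_secs` value reported on completion is simply the elapsed time between issuing the task and the poll that observed the terminal state.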
[ 1123.466181] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91030dd-8227-4817-944d-c004171890f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.497021] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1123.497021] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50f6d701-8447-45bf-ba7c-469fa4a2cc6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.514728] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1123.514728] env[68244]: value = "task-2781073" [ 1123.514728] env[68244]: _type = "Task" [ 1123.514728] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.523631] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.720031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d17b3622-b87f-48b0-815b-3570c0555eba tempest-ServersNegativeTestJSON-1623060957 tempest-ServersNegativeTestJSON-1623060957-project-member] Lock "ed5b8ba3-c8f0-468f-85d1-f36179bfef32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.909s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.808852] env[68244]: DEBUG oslo_concurrency.lockutils [req-18af25a3-fdf3-46b4-ae4a-42559a91acdb req-967671fb-41e8-4c16-9ed4-167b5f20e212 service nova] Releasing lock "refresh_cache-10e67250-5ddc-430d-aac7-4e6bae0778e5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.866316] env[68244]: DEBUG oslo_vmware.api [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781072, 'name': PowerOnVM_Task, 'duration_secs': 0.734043} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.867052] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.007052] env[68244]: DEBUG nova.compute.manager [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.008707] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc93191-9be3-49ae-a7c3-eaaab6e8b8f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.030723] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781073, 'name': ReconfigVM_Task, 'duration_secs': 0.305813} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.032075] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e-rescue.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1124.032075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbd9f89-55b4-4941-b15a-6acdf684ed2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.060892] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67b01733-acef-4103-9fae-f3792650e910 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.077586] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1124.077586] env[68244]: value = "task-2781074" [ 1124.077586] env[68244]: _type = "Task" [ 1124.077586] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.094810] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781074, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.498146] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aa2073-9a13-4fae-a4cc-d29a04da7111 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.506075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6d9182-216f-4cfb-a1e9-b41daa6ddd31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.542329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72cebd1-ad62-4815-b060-954ac9fafa24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.548078] env[68244]: DEBUG oslo_concurrency.lockutils [None req-301d5103-b14e-4534-a366-b31b7da6dd3d tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 39.849s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.552626] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b6a12c-e3ae-4b9e-a140-89b2b6edce56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.566346] env[68244]: DEBUG nova.compute.provider_tree [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.586611] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781074, 'name': ReconfigVM_Task, 'duration_secs': 0.177756} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.586896] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1124.587206] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83ed8b7f-5117-4cde-8dbd-653b36ad83e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.594354] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1124.594354] env[68244]: value = "task-2781075" [ 1124.594354] env[68244]: _type = "Task" [ 1124.594354] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.603386] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781075, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.617107] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694df735-1155-419b-afd0-9c0e5cd7c1fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.639091] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.646649] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.646881] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.725610] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.726454] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.070222] env[68244]: DEBUG nova.scheduler.client.report [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.110253] env[68244]: DEBUG oslo_vmware.api [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781075, 'name': PowerOnVM_Task, 'duration_secs': 0.405621} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.110560] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.115312] env[68244]: DEBUG nova.compute.manager [None req-c3ae831a-9f4b-442b-956a-c94ca32c1e5c tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.116123] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd18b1b-3693-4f7b-a577-3580f92b296c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.148582] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.149038] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1125.151629] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6476bcca-8ec8-40db-9706-0f8cbdc392a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.159637] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1125.159637] env[68244]: value = "task-2781076" [ 1125.159637] env[68244]: _type = "Task" [ 1125.159637] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.176109] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.231607] env[68244]: INFO nova.compute.manager [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Detaching volume 26161298-d0e0-46f2-a061-361dc4332cca [ 1125.283480] env[68244]: INFO nova.virt.block_device [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Attempting to driver detach volume 26161298-d0e0-46f2-a061-361dc4332cca from mountpoint /dev/sdb [ 1125.283746] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1125.283944] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559108', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'name': 'volume-26161298-d0e0-46f2-a061-361dc4332cca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cedcff81-0010-4fa6-95bf-72a4dcac5427', 'attached_at': '', 'detached_at': '', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'serial': '26161298-d0e0-46f2-a061-361dc4332cca'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1125.284889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00b2572-6909-4635-be4c-5355d3526531 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.310272] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f517a6-f212-409f-9a0a-a88af4769e51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.317372] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cc99fd-321f-4821-87c7-f99a7c9a64ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.338942] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9521ea2-42c7-4868-8f1b-e1261d6ce852 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.355355] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] The volume has not been displaced from its original location: [datastore2] volume-26161298-d0e0-46f2-a061-361dc4332cca/volume-26161298-d0e0-46f2-a061-361dc4332cca.vmdk. 
No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1125.364943] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfiguring VM instance instance-00000038 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1125.365378] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7699dac7-5938-479d-ad00-acc0e37266cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.385972] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1125.385972] env[68244]: value = "task-2781077" [ 1125.385972] env[68244]: _type = "Task" [ 1125.385972] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.394784] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.583027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.583027] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1125.585882] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.128s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.588222] env[68244]: INFO nova.compute.claims [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1125.671334] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781076, 'name': PowerOffVM_Task, 'duration_secs': 0.294194} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.671484] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.671653] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1125.680029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.896267] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781077, 'name': ReconfigVM_Task, 'duration_secs': 0.336457} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.896568] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Reconfigured VM instance instance-00000038 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1125.901340] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0ab65b3-bf09-47ae-b3bb-627b314faf6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.917617] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1125.917617] env[68244]: value = "task-2781078" [ 1125.917617] env[68244]: _type = "Task" [ 1125.917617] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.925479] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781078, 'name': ReconfigVM_Task} progress is 5%. 
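
Editor's note: the ReconfigVM_Task entries above follow a submit-then-poll pattern — the vSphere task is invoked, its progress is polled ("progress is 5%"), and a duration_secs is logged once it completes. A minimal sketch of that pattern, stdlib only; the task/poll shape here is a hypothetical placeholder, not the oslo.vmware API:

    import time

    def wait_for_task(poll, interval=0.5, timeout=120):
        """Poll a task until it reports success or error.

        `poll` is any callable returning a dict with 'state' and 'progress'
        keys (illustrative shape only); returns elapsed seconds on success.
        """
        start = time.monotonic()
        while True:
            info = poll()
            if info['state'] == 'success':
                return time.monotonic() - start   # the logged duration_secs
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            if time.monotonic() - start > timeout:
                raise TimeoutError('task did not complete in time')
            # comparable to the "progress is N%" DEBUG lines above
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)
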
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.094262] env[68244]: DEBUG nova.compute.utils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1126.098322] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1126.099518] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1126.115954] env[68244]: INFO nova.compute.manager [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Unrescuing [ 1126.116103] env[68244]: DEBUG oslo_concurrency.lockutils [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.116270] env[68244]: DEBUG oslo_concurrency.lockutils [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquired lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.116750] env[68244]: DEBUG nova.network.neutron [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.142149] env[68244]: DEBUG nova.policy [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25327ec3d0214898ab5e9467d0bbd210', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db164aeca884db08460570c96ee1562', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1126.183054] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1126.187036] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d9c7df6-458a-46e3-bdb9-5d7aa5fafa9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.207382] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1126.207382] env[68244]: value = "task-2781079" [ 1126.207382] env[68244]: _type = "Task" [ 1126.207382] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.215704] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781079, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.427776] env[68244]: DEBUG oslo_vmware.api [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781078, 'name': ReconfigVM_Task, 'duration_secs': 0.163332} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.428270] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559108', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'name': 'volume-26161298-d0e0-46f2-a061-361dc4332cca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cedcff81-0010-4fa6-95bf-72a4dcac5427', 'attached_at': '', 'detached_at': '', 'volume_id': '26161298-d0e0-46f2-a061-361dc4332cca', 'serial': '26161298-d0e0-46f2-a061-361dc4332cca'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1126.599199] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Start building block device mappings for instance. 
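
Editor's note: the nova.virt.hardware lines above (flavor/image limits 0:0:0, maximum 65536 sockets/cores/threads, one possible topology for a single vCPU) come from CPU-topology selection. A heavily simplified sketch of the idea — enumerate factorizations of the vCPU count within the maxima — is below; it is an illustration, not Nova's actual algorithm:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) combinations whose product is vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # one topology, 1 socket x 1 core x 1 thread -- compare the
    # "Possible topologies" line above for the 1-vCPU m1.micro flavor
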
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.606397] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Successfully created port: 6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1126.721772] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.875314] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371d7021-e8c0-42b3-8e80-83ea5ed924ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.882875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2fd4db-f85f-4067-b7c4-360075ee678b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.915601] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f1f944-0abd-4453-987e-69095e4771a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.924779] env[68244]: DEBUG nova.network.neutron [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updating instance_info_cache with network_info: [{"id": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "address": "fa:16:3e:38:7e:40", "network": {"id": "d6631cc0-83f4-455c-8c4f-aa536641b083", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1110406862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "713d71c9807247308f468c2ef7ede516", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b35363e-cd", "ovs_interfaceid": "6b35363e-cd67-4df3-a6e4-3fa00355ae06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.928430] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0dbbf8-bd90-4b8a-a8b9-50dc74cd5244 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.945679] env[68244]: DEBUG nova.compute.provider_tree [None 
req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.984899] env[68244]: DEBUG nova.objects.instance [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'flavor' on Instance uuid cedcff81-0010-4fa6-95bf-72a4dcac5427 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.218851] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781079, 'name': ReconfigVM_Task, 'duration_secs': 0.524322} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.219263] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1127.434699] env[68244]: DEBUG oslo_concurrency.lockutils [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Releasing lock "refresh_cache-91422c89-601c-4e5f-b5b0-fa2639031d3e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.435407] env[68244]: DEBUG nova.objects.instance [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lazy-loading 'flavor' on Instance uuid 91422c89-601c-4e5f-b5b0-fa2639031d3e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.451972] env[68244]: DEBUG nova.scheduler.client.report [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.612551] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Start spawning the instance on the hypervisor. 
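
Editor's note: the inventory reported above (48 VCPU at allocation_ratio 4.0, 196590 MB RAM with 512 reserved, 400 GB disk) is what placement schedules against; usable capacity per resource class is roughly (total - reserved) * allocation_ratio. A small illustration using a trimmed copy of the dictionary from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # capacity placement can allocate against: (total - reserved) * ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
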
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.650530] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.650784] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.650942] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.651149] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.651295] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.651440] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.651683] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.652371] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.652728] 
env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.652825] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.653063] env[68244]: DEBUG nova.virt.hardware [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.653943] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298865fd-7f04-4a40-b6fa-fd4141281bae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.663203] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd4458c-ca88-4aba-ada0-d5c4163bca48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.729490] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.729775] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.729933] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.730158] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.730928] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 
tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.730928] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.730928] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.730928] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.731204] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.731204] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.731363] env[68244]: DEBUG nova.virt.hardware [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.736798] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1127.737107] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02ad928f-3365-49f4-b9b0-f0c6bf79c51c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.756829] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1127.756829] env[68244]: value = "task-2781080" [ 1127.756829] env[68244]: _type = "Task" [ 1127.756829] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.764967] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781080, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.940951] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083c9a1a-513d-464d-bdb9-9baec93de963 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.965278] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.965936] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1127.969174] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.969629] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.293s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.971155] env[68244]: INFO nova.compute.claims [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.977020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38280e2c-a4b4-4f74-be15-c0dc537a07d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.980376] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1127.980376] env[68244]: value = "task-2781081" [ 1127.980376] env[68244]: _type = "Task" [ 1127.980376] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.990377] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781081, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.992109] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b75b2ad8-ae1d-4f28-97be-f1a30e5df98e tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.151062] env[68244]: DEBUG nova.compute.manager [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Received event network-vif-plugged-6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1128.151359] env[68244]: DEBUG oslo_concurrency.lockutils [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] Acquiring lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.151573] env[68244]: DEBUG oslo_concurrency.lockutils [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.151741] env[68244]: DEBUG oslo_concurrency.lockutils [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.151979] env[68244]: DEBUG nova.compute.manager [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] No waiting events found dispatching network-vif-plugged-6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1128.152099] env[68244]: WARNING nova.compute.manager [req-baed861a-0289-4ad2-9624-350045b26bac req-2710433c-c199-4a35-b90c-eba4c1db6293 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Received unexpected event network-vif-plugged-6809cc9e-9900-43c7-850f-f17a020b6c34 for instance with vm_state building and task_state spawning. [ 1128.267139] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781080, 'name': ReconfigVM_Task, 'duration_secs': 0.373038} completed successfully. 
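
Editor's note: the lockutils lines throughout this section ("Acquiring lock ... by ...", "acquired ... waited N s", "released ... held N s") come from named in-process locks guarding shared state such as the resource tracker's compute_resources bookkeeping and per-instance event lists. A minimal sketch of the same pattern with oslo.concurrency, in decorator and context-manager form; the function bodies are placeholders:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # serialize resource-tracker bookkeeping, as in the log above
        pass

    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # only one request (re)builds this instance's network info cache
            pass
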
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.267139] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1128.267820] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d388e3d2-6ab2-49d2-9781-3c00ef848bb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.296911] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1128.297581] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-149b7565-98a2-4647-adee-9b71862e9e6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.318615] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Successfully updated port: 6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.321788] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1128.321788] env[68244]: value = "task-2781082" [ 1128.321788] env[68244]: _type = "Task" [ 1128.321788] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.329848] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781082, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.346466] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.346697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.346898] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.347109] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.347284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.353298] env[68244]: INFO nova.compute.manager [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Terminating instance [ 1128.471149] env[68244]: DEBUG nova.compute.utils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1128.473078] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Allocating IP information in the background. 
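
Editor's note: "Start building networks asynchronously for instance" and "Allocating IP information in the background" reflect a pattern where Neutron port creation is kicked off in the background while the compute manager continues building block device mappings, then the network result is collected before spawning. A generic sketch of that overlap with concurrent.futures; Nova itself uses its own eventlet-based helpers, and the callables named here are hypothetical:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(instance, allocate_networks, build_block_devices, spawn):
        """allocate_networks / build_block_devices / spawn are hypothetical callables."""
        with ThreadPoolExecutor(max_workers=1) as pool:
            # start network allocation in the background
            nw_future = pool.submit(allocate_networks, instance)
            # keep working on block device mappings meanwhile
            bdms = build_block_devices(instance)
            # block until ports exist before spawning on the hypervisor
            network_info = nw_future.result()
        spawn(instance, network_info, bdms)
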
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1128.473541] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1128.490674] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781081, 'name': PowerOffVM_Task, 'duration_secs': 0.223138} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.490943] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.496342] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1128.497052] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95f62e58-3c1d-4ade-b26a-d01b86510d84 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.518300] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1128.518300] env[68244]: value = "task-2781083" [ 1128.518300] env[68244]: _type = "Task" [ 1128.518300] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.528427] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781083, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.528823] env[68244]: DEBUG nova.policy [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fa4f9c8b0f1450788cd56a89e23d93a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a581fe596ee49c6b66f17d1ed11d120', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.818979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.819224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquired lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.820334] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1128.833159] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.857398] env[68244]: DEBUG nova.compute.manager [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1128.857684] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1128.858613] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6337be6c-6734-47bb-ad64-d347b2260bda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.867283] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.867543] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-159630fa-ab33-458c-87a3-39b1b4c16c18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.875329] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1128.875329] env[68244]: value = "task-2781084" [ 1128.875329] env[68244]: _type = "Task" [ 1128.875329] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.887665] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.916296] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Successfully created port: 308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.983435] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1129.032559] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781083, 'name': ReconfigVM_Task, 'duration_secs': 0.272129} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.032929] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1129.033154] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.033431] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71ec87e1-4dae-48d2-8839-42fe80b5fd83 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.043603] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1129.043603] env[68244]: value = "task-2781085" [ 1129.043603] env[68244]: _type = "Task" [ 1129.043603] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.059330] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781085, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.301074] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad570c0-ca25-4c4a-9e1c-896b6377ef91 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.310107] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c911542c-20d4-4271-b1dd-382a73991a1c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.346391] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811b7bf6-cb04-4313-82d6-0eaa17b6124f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.354037] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781082, 'name': ReconfigVM_Task, 'duration_secs': 0.606778} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.356207] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.357470] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.361783] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f328f4d2-6dcf-40e1-80f0-2618b23bbfe2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.377987] env[68244]: DEBUG nova.compute.provider_tree [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.380445] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1129.393402] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781084, 'name': PowerOffVM_Task, 'duration_secs': 0.32679} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.395227] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.395596] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.395871] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c4a8707-7e2b-4b69-a5b8-9a7fbaaf8de5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.472550] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.472550] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.472550] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleting the datastore file [datastore2] cedcff81-0010-4fa6-95bf-72a4dcac5427 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.472550] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fadf8d9-3e94-471f-a3d8-bd658c9485f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.479639] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1129.479639] env[68244]: value = "task-2781087" [ 1129.479639] env[68244]: _type = "Task" [ 1129.479639] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.490752] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781087, 'name': DeleteDatastoreFile_Task} progress is 0%. 
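Aside: the task lifecycle logged above (PowerOnVM_Task at 33%, the completed ReconfigVM_Task and PowerOffVM_Task, DeleteDatastoreFile_Task still queued at 0%) is driven by the wait_for_task/_poll_task loop named in the trailers. A minimal sketch of that polling pattern follows; get_task_info is a hypothetical callable standing in for the vSphere TaskInfo lookup that the real driver delegates to oslo.vmware's VMwareAPISession.wait_for_task().

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere-style task until it succeeds or errors out.

        get_task_info is a hypothetical callable returning an object with
        .key, .state ('queued' | 'running' | 'success' | 'error'),
        .progress, .result and .error attributes.
        """
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # queued/running: report progress, then poll again
            print("Task %s progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(poll_interval)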
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.556982] env[68244]: DEBUG oslo_vmware.api [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781085, 'name': PowerOnVM_Task, 'duration_secs': 0.387458} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.556982] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.556982] env[68244]: DEBUG nova.compute.manager [None req-504bd825-0069-434b-90b6-a74a1f5541b9 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.557174] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b772c08-d0fa-4abb-b79f-c89be98b7f24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.560294] env[68244]: DEBUG nova.network.neutron [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Updating instance_info_cache with network_info: [{"id": "6809cc9e-9900-43c7-850f-f17a020b6c34", "address": "fa:16:3e:f0:2e:35", "network": {"id": "394a62cf-e0c6-49bf-9f6b-b0a7898d0c72", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1782232478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db164aeca884db08460570c96ee1562", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6809cc9e-99", "ovs_interfaceid": "6809cc9e-9900-43c7-850f-f17a020b6c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.868502] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef54cd82-22aa-48b2-8316-7984de0d2459 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.892088] env[68244]: DEBUG nova.scheduler.client.report [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Inventory has not changed for provider 
b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.896345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ff5473-6eea-4528-9966-59d4ea88e368 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.918472] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.989022] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.992730] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Start spawning the instance on the hypervisor. 
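Aside: the inventory payload logged above for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 is what the resource tracker reports to Placement. As a rough illustration only (the exact rounding rules live in the placement service), schedulable capacity per resource class is approximately (total - reserved) * allocation_ratio:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # capacity ~= (total - reserved) * allocation_ratio, per resource class
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}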
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1130.019583] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.019824] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.019979] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.020197] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.020372] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.020521] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.020730] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.020889] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.021273] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 
tempest-ServersTestJSON-1639636095-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.021480] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.021663] env[68244]: DEBUG nova.virt.hardware [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.022601] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93831ce8-ef36-43a3-81f2-2db3415b0fd2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.031083] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f54440-71ba-4907-83e4-62b1a125a68c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.062790] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Releasing lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.063431] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Instance network_info: |[{"id": "6809cc9e-9900-43c7-850f-f17a020b6c34", "address": "fa:16:3e:f0:2e:35", "network": {"id": "394a62cf-e0c6-49bf-9f6b-b0a7898d0c72", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1782232478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db164aeca884db08460570c96ee1562", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6809cc9e-99", "ovs_interfaceid": "6809cc9e-9900-43c7-850f-f17a020b6c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1130.063853] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: 
d4fd9092-9081-4be0-b33d-c175be24f12e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:2e:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6809cc9e-9900-43c7-850f-f17a020b6c34', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1130.071768] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Creating folder: Project (2db164aeca884db08460570c96ee1562). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.072055] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fb7bc89-4eb5-495f-9fa9-eca3aeb83a20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.082717] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Created folder: Project (2db164aeca884db08460570c96ee1562) in parent group-v558876. [ 1130.082934] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Creating folder: Instances. Parent ref: group-v559141. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1130.083256] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-051960e2-5bfa-45a3-b506-66e8550f7d4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.093471] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Created folder: Instances in parent group-v559141. [ 1130.093871] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.094480] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1130.094654] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29d59959-16f8-45b9-9868-91b1fd4306a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.115834] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1130.115834] env[68244]: value = "task-2781090" [ 1130.115834] env[68244]: _type = "Task" [ 1130.115834] env[68244]: } to complete. 
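Aside: the nova.virt.hardware lines above reduce the 1-vCPU m1.nano flavor, with no flavor or image limits, to the single topology VirtCPUTopology(cores=1, sockets=1, threads=1). A toy version of that enumeration is sketched below; the real logic, including preference handling and sorting, lives in nova.virt.hardware.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
        found = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log above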
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.124372] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781090, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.181330] env[68244]: DEBUG nova.compute.manager [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Received event network-changed-6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1130.182018] env[68244]: DEBUG nova.compute.manager [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Refreshing instance network info cache due to event network-changed-6809cc9e-9900-43c7-850f-f17a020b6c34. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1130.182285] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] Acquiring lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.185624] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] Acquired lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.185624] env[68244]: DEBUG nova.network.neutron [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Refreshing network info cache for port 6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.185624] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.185624] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.396996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.503442] env[68244]: DEBUG oslo_vmware.api [None req-c9419405-40c0-4d99-be3d-de17324ed184 
tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.635711} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.503442] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.503769] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.503769] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.503926] env[68244]: INFO nova.compute.manager [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1130.504071] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.504266] env[68244]: DEBUG nova.compute.manager [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1130.504351] env[68244]: DEBUG nova.network.neutron [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1130.625772] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781090, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.689026] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Starting instance... 
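Aside: the teardown of instance cedcff81-0010-4fa6-95bf-72a4dcac5427 above follows a fixed order: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. A condensed sketch of that ordering follows; all four arguments are hypothetical callables standing in for the driver operations named in the log (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, deallocate_for_instance).

    def destroy_instance(power_off, unregister_vm, delete_datastore_dir,
                         deallocate_network):
        """Tear an instance down in the order the log shows."""
        power_off()              # stop the guest first
        unregister_vm()          # remove it from the vCenter inventory
        delete_datastore_dir()   # delete the [datastore] <uuid>/ contents
        deallocate_network()     # finally release the Neutron ports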
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1130.761054] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Successfully updated port: 308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.870888] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.871403] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.871403] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.871613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.871761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.873806] env[68244]: INFO nova.compute.manager [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Terminating instance [ 1130.899468] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "69405b9b-2b8d-4050-98d2-91ba932818d4" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.899673] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "69405b9b-2b8d-4050-98d2-91ba932818d4" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.126874] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781090, 'name': CreateVM_Task, 'duration_secs': 0.521339} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.127122] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1131.127816] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.127986] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.128352] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1131.128611] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44db9528-820d-475d-aa56-ccc682c9857b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.133282] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1131.133282] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213d8ca-b5e2-71e3-cb00-b7768d98d4c5" [ 1131.133282] env[68244]: _type = "Task" [ 1131.133282] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.141597] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213d8ca-b5e2-71e3-cb00-b7768d98d4c5, 'name': SearchDatastore_Task} progress is 0%. 
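Aside: the oslo_concurrency.lockutils lines throughout this section ("Acquiring lock ...", "acquired ... waited 0.000s", "released ... held 2.427s") come from oslo.concurrency's lock helpers; the waited/held figures are measured around lock entry and exit. A minimal sketch of the same pattern, assuming oslo.concurrency is available and with do_claim as a hypothetical callable:

    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid, do_claim):
        # Serializes claims the way the resource tracker does with its
        # "compute_resources" lock.
        with lockutils.lock('compute_resources'):
            return do_claim(instance_uuid)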
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.199479] env[68244]: DEBUG nova.network.neutron [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Updated VIF entry in instance network info cache for port 6809cc9e-9900-43c7-850f-f17a020b6c34. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.199831] env[68244]: DEBUG nova.network.neutron [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Updating instance_info_cache with network_info: [{"id": "6809cc9e-9900-43c7-850f-f17a020b6c34", "address": "fa:16:3e:f0:2e:35", "network": {"id": "394a62cf-e0c6-49bf-9f6b-b0a7898d0c72", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1782232478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db164aeca884db08460570c96ee1562", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6809cc9e-99", "ovs_interfaceid": "6809cc9e-9900-43c7-850f-f17a020b6c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.214309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.214309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.214309] env[68244]: INFO nova.compute.claims [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1131.263502] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.263549] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.263760] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1131.317612] env[68244]: DEBUG nova.compute.manager [req-79e4679e-43bc-4387-b157-64af283063aa req-d1083965-bc04-4b6e-be63-291b25f7759b service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Received event network-vif-deleted-7cc08f9f-ecf6-45df-a147-29489ed20ade {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1131.317704] env[68244]: INFO nova.compute.manager [req-79e4679e-43bc-4387-b157-64af283063aa req-d1083965-bc04-4b6e-be63-291b25f7759b service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Neutron deleted interface 7cc08f9f-ecf6-45df-a147-29489ed20ade; detaching it from the instance and deleting it from the info cache [ 1131.317841] env[68244]: DEBUG nova.network.neutron [req-79e4679e-43bc-4387-b157-64af283063aa req-d1083965-bc04-4b6e-be63-291b25f7759b service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.379974] env[68244]: DEBUG nova.compute.manager [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1131.380352] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1131.381293] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6023261-bf3c-40c4-9dcc-c251c283b93e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.388834] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1131.389102] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02a5fe66-abce-454a-9ad6-6813796fc5e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.396409] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1131.396409] env[68244]: value = "task-2781091" [ 1131.396409] env[68244]: _type = "Task" [ 1131.396409] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.404151] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "69405b9b-2b8d-4050-98d2-91ba932818d4" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.404756] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1131.407489] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.603510] env[68244]: DEBUG nova.network.neutron [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Port 9389f00b-7d76-4743-9f6d-d9af08918ce6 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1131.643106] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5213d8ca-b5e2-71e3-cb00-b7768d98d4c5, 'name': SearchDatastore_Task, 'duration_secs': 0.016692} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.643427] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.643666] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1131.643903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.644076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.644330] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.644527] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d97cfe93-f94d-47e1-94ae-554973c16a9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.652978] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1131.653173] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1131.653915] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e25bc6-e76c-4985-890f-ed626de5575a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.659348] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1131.659348] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525bae3a-c2c1-b985-7707-841e97c5e58d" [ 1131.659348] env[68244]: _type = "Task" [ 1131.659348] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.666733] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525bae3a-c2c1-b985-7707-841e97c5e58d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.703066] env[68244]: DEBUG oslo_concurrency.lockutils [req-2b8c0701-2ff2-4bc4-abcb-fff27805d6e5 req-981d2c8d-8749-41db-bf7e-5c04db5075ac service nova] Releasing lock "refresh_cache-d4fd9092-9081-4be0-b33d-c175be24f12e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.795273] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Instance cache missing network info. 
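Aside: the image-cache handling above (take the per-image lock, ensure devstack-image-cache_base exists, then run SearchDatastore_Task for the cached VMDK) is a check-then-create pattern guarded by a lock so concurrent spawns do not fetch the same image twice. A schematic sketch follows; datastore_search and fetch_image are hypothetical callables standing in for the datastore search and image-transfer steps.

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, datastore_search, fetch_image):
        """Fetch an image into the cache only if it is not already there."""
        lock_name = '[datastore1] devstack-image-cache_base/%s' % image_id
        with lockutils.lock(lock_name):
            if not datastore_search(image_id):   # cache miss
                fetch_image(image_id)            # download into the cache
        return image_id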
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1131.797642] env[68244]: DEBUG nova.network.neutron [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.821750] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0999fc6-fe86-431f-b898-d32f128461d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.830497] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65a8ace-6562-4578-9b43-4a39052e9eb0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.871708] env[68244]: DEBUG nova.compute.manager [req-79e4679e-43bc-4387-b157-64af283063aa req-d1083965-bc04-4b6e-be63-291b25f7759b service nova] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Detach interface failed, port_id=7cc08f9f-ecf6-45df-a147-29489ed20ade, reason: Instance cedcff81-0010-4fa6-95bf-72a4dcac5427 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1131.906628] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781091, 'name': PowerOffVM_Task, 'duration_secs': 0.211037} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.906910] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1131.907108] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1131.907368] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e09b511-6a2a-495a-9019-9b49ea7a1782 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.911191] env[68244]: DEBUG nova.compute.utils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1131.912511] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1131.912690] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1131.975598] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1131.975834] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1131.976028] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleting the datastore file [datastore2] 91422c89-601c-4e5f-b5b0-fa2639031d3e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1131.976320] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ab6f492-0aa2-4710-8dbe-c4de7468b2f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.981530] env[68244]: DEBUG nova.policy [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9cd1d01ffed436fa2c69f91a64201cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4882d6423a046bdb3626c18f06f056c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1131.984219] env[68244]: DEBUG nova.network.neutron [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Updating instance_info_cache with network_info: [{"id": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "address": "fa:16:3e:8f:8c:c1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308e90b2-5c", "ovs_interfaceid": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.992246] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1131.992246] env[68244]: value = "task-2781093" [ 1131.992246] env[68244]: _type = "Task" [ 1131.992246] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.008141] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.171044] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525bae3a-c2c1-b985-7707-841e97c5e58d, 'name': SearchDatastore_Task, 'duration_secs': 0.010417} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.171849] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bfeca68-45b3-4183-882d-9f6832331a9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.177130] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1132.177130] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52037706-6601-061c-7ba2-1f28ed39d8a5" [ 1132.177130] env[68244]: _type = "Task" [ 1132.177130] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.186594] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52037706-6601-061c-7ba2-1f28ed39d8a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.206371] env[68244]: DEBUG nova.compute.manager [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Received event network-vif-plugged-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1132.206565] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Acquiring lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.206776] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.207374] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.207374] env[68244]: DEBUG nova.compute.manager [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] No waiting events found dispatching network-vif-plugged-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.207374] env[68244]: WARNING nova.compute.manager [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Received unexpected event network-vif-plugged-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 for instance with vm_state building and task_state spawning. [ 1132.207530] env[68244]: DEBUG nova.compute.manager [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Received event network-changed-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1132.207572] env[68244]: DEBUG nova.compute.manager [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Refreshing instance network info cache due to event network-changed-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1132.207703] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Acquiring lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.256590] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Successfully created port: 46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.300120] env[68244]: INFO nova.compute.manager [-] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Took 1.80 seconds to deallocate network for instance. [ 1132.416142] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1132.482784] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1a8a61-f6ea-442d-bff2-f2ea08f2ed3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.490096] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.490443] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Instance network_info: |[{"id": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "address": "fa:16:3e:8f:8c:c1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308e90b2-5c", "ovs_interfaceid": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1132.490799] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Acquired lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.490976] env[68244]: DEBUG nova.network.neutron [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Refreshing network info cache for port 308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.492742] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:8c:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.499523] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1132.500459] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a251bbd-0cc9-4e36-ab23-fe369cb64ac9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.506393] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1132.509547] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72575138-f86d-4ccf-9bdd-e2d3fcce6ed7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.529799] env[68244]: DEBUG oslo_vmware.api [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17393} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.554567] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1132.554793] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1132.554970] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1132.555163] env[68244]: INFO nova.compute.manager [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1132.555410] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1132.555605] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.555605] env[68244]: value = "task-2781094" [ 1132.555605] env[68244]: _type = "Task" [ 1132.555605] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.558557] env[68244]: DEBUG nova.compute.manager [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1132.558647] env[68244]: DEBUG nova.network.neutron [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1132.561030] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bb2717-dafe-4a2e-884a-786013425124 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.565964] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.566176] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.566406] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.566592] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.566760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.572028] env[68244]: INFO nova.compute.manager [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Terminating instance [ 1132.580985] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b1e7e3-3db6-4733-a7e7-75a9ccd2e349 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.586023] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781094, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.586373] env[68244]: DEBUG nova.compute.manager [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1132.586583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1132.587623] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d994363-01ae-402c-9c5c-094882216d5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.603964] env[68244]: DEBUG nova.compute.provider_tree [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1132.611609] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.622544] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c6e5071-46cd-41ba-8ed7-451f66a52ea6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.633638] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.634039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1132.634351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.641455] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1132.641455] env[68244]: value = "task-2781095" [ 1132.641455] env[68244]: _type = "Task" [ 1132.641455] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.653856] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2781095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.688508] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52037706-6601-061c-7ba2-1f28ed39d8a5, 'name': SearchDatastore_Task, 'duration_secs': 0.012393} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.688839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.689176] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] d4fd9092-9081-4be0-b33d-c175be24f12e/d4fd9092-9081-4be0-b33d-c175be24f12e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1132.689465] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ea56806-0642-43be-b5df-9b7b316e383e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.697591] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1132.697591] env[68244]: value = "task-2781096" [ 1132.697591] env[68244]: _type = "Task" [ 1132.697591] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.709646] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.768048] env[68244]: DEBUG nova.network.neutron [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Updated VIF entry in instance network info cache for port 308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.768678] env[68244]: DEBUG nova.network.neutron [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Updating instance_info_cache with network_info: [{"id": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "address": "fa:16:3e:8f:8c:c1", "network": {"id": "a351e37b-c431-448a-865b-79d39ead319b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1905243553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a581fe596ee49c6b66f17d1ed11d120", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308e90b2-5c", "ovs_interfaceid": "308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.806565] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.069626] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781094, 'name': CreateVM_Task, 'duration_secs': 0.365713} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.069809] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.070966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.070966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.071141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1133.071386] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aecf84db-527d-4fe8-ab5b-5d99b2622664 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.075840] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1133.075840] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df39cc-cbee-8cb4-1187-45774880167a" [ 1133.075840] env[68244]: _type = "Task" [ 1133.075840] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.083829] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df39cc-cbee-8cb4-1187-45774880167a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.133230] env[68244]: ERROR nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [req-e6886aa9-190b-4d4a-bf8b-fc30592aaad3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e6886aa9-190b-4d4a-bf8b-fc30592aaad3"}]} [ 1133.152438] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2781095, 'name': PowerOffVM_Task, 'duration_secs': 0.248886} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.153528] env[68244]: DEBUG nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1133.156055] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.156259] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.157551] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44bf7f35-04c8-45f8-810f-83e4ff878967 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.176878] env[68244]: DEBUG nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1133.177131] env[68244]: DEBUG nova.compute.provider_tree [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.205466] env[68244]: DEBUG 
nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1133.213240] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781096, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.229330] env[68244]: DEBUG nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1133.240245] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1133.240612] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1133.241081] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Deleting the datastore file [datastore2] fc75039c-f2d0-4d4b-9a82-b605b6ba63d5 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1133.241346] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f69395e5-92e3-4c6d-80ec-2d1c374dedd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.249197] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for the task: (returnval){ [ 1133.249197] env[68244]: value = "task-2781098" [ 1133.249197] env[68244]: _type = "Task" [ 1133.249197] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.266563] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2781098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.272740] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb1266b9-282b-468d-aec5-34ebdc57156a req-542c63f6-7f3d-4b86-8295-00da05d33901 service nova] Releasing lock "refresh_cache-8d0fa6cd-c14f-49ab-9595-396a10b4639a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.428856] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1133.455562] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.455896] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.456130] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.456391] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.456568] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.456754] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.457013] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 
tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.457238] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.457468] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.457968] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.457968] env[68244]: DEBUG nova.virt.hardware [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.458932] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5566ee-199e-4b1a-b780-3a1b2a943e39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.471371] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e48621-218d-4d74-9e3a-b10ac0d912e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.527494] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf95202-3432-4b2c-910c-30c81857c856 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.534978] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ada1628-edab-421d-8b24-1c6213589053 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.565299] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ba37f7-24ca-4aa2-b98f-dde5d9e5d594 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.572981] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf93044-33d4-4c18-9bf3-7d067d3cde84 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.591543] env[68244]: DEBUG nova.compute.provider_tree [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with 
inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.596693] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52df39cc-cbee-8cb4-1187-45774880167a, 'name': SearchDatastore_Task, 'duration_secs': 0.013135} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.597396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.597639] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.597890] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.598058] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.598277] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.598573] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8d0f6a5-fbe3-4683-9f8e-c5da2a05944e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.608606] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.608813] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1133.609889] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f20e89d-3fee-4d67-87ca-d2a0a1c46eb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.616037] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1133.616037] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c31d4f-590a-b289-0406-3fae11058c39" [ 1133.616037] env[68244]: _type = "Task" [ 1133.616037] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.624018] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c31d4f-590a-b289-0406-3fae11058c39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.652404] env[68244]: DEBUG nova.compute.manager [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Received event network-vif-plugged-46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1133.652627] env[68244]: DEBUG oslo_concurrency.lockutils [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] Acquiring lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.652829] env[68244]: DEBUG oslo_concurrency.lockutils [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.653029] env[68244]: DEBUG oslo_concurrency.lockutils [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.654063] env[68244]: DEBUG nova.compute.manager [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] No waiting events found dispatching network-vif-plugged-46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1133.654063] env[68244]: WARNING nova.compute.manager [req-bc6f372b-2c74-4da2-9cf6-5b2eed8d6c7c 
req-1b85b817-e948-4fa1-9606-a7a7d8bc2a39 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Received unexpected event network-vif-plugged-46f33af8-00d6-49df-b0a3-3bc05bdff4d1 for instance with vm_state building and task_state spawning. [ 1133.674612] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.674708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.674875] env[68244]: DEBUG nova.network.neutron [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.681700] env[68244]: DEBUG nova.network.neutron [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.707721] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.949345} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.708131] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore1] d4fd9092-9081-4be0-b33d-c175be24f12e/d4fd9092-9081-4be0-b33d-c175be24f12e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.708416] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1133.708734] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a810c76-7f04-405c-8a82-e164551bbad9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.721051] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1133.721051] env[68244]: value = "task-2781099" [ 1133.721051] env[68244]: _type = "Task" [ 1133.721051] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.731121] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781099, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.746870] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Successfully updated port: 46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1133.757614] env[68244]: DEBUG oslo_vmware.api [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Task: {'id': task-2781098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24709} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.758577] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1133.758809] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1133.759018] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1133.759199] env[68244]: INFO nova.compute.manager [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1133.759439] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1133.759897] env[68244]: DEBUG nova.compute.manager [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1133.759997] env[68244]: DEBUG nova.network.neutron [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.126676] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c31d4f-590a-b289-0406-3fae11058c39, 'name': SearchDatastore_Task, 'duration_secs': 0.010902} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.127671] env[68244]: DEBUG nova.scheduler.client.report [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 143 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1134.127915] env[68244]: DEBUG nova.compute.provider_tree [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 143 to 144 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1134.128230] env[68244]: DEBUG nova.compute.provider_tree [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1134.132578] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1532f3bd-12c2-4a68-9375-081630e6da3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.141027] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1134.141027] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f02eab-26f0-76c0-d55a-0565dabcf83e" [ 1134.141027] env[68244]: _type = "Task" [ 1134.141027] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.146146] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f02eab-26f0-76c0-d55a-0565dabcf83e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.184240] env[68244]: INFO nova.compute.manager [-] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Took 1.63 seconds to deallocate network for instance. 
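The run above, from the 409 "placement.concurrent_update" error at 1133.133 through the provider generation bump from 143 to 144 at 1134.128, shows Placement's generation-based optimistic concurrency: every inventory PUT carries the resource provider generation the writer last saw, and a 409 only means another writer updated the provider first, so the client refreshes and retries. A minimal sketch of that refresh-and-retry loop against the plain Placement REST API follows; the endpoint, headers and helper names are illustrative placeholders, not Nova's actual SchedulerReportClient code, and a real client would also send a Keystone token.

    import requests

    PLACEMENT = "http://placement.example:8778"             # placeholder endpoint
    RP_UUID = "b885cb16-3bd4-46d8-abd9-28a1bf1058e3"         # provider UUID from the log
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}    # plus an auth token in practice

    def get_inventories(session):
        # GET returns the current inventories together with the provider generation.
        resp = session.get(
            f"{PLACEMENT}/resource_providers/{RP_UUID}/inventories", headers=HEADERS)
        resp.raise_for_status()
        return resp.json()  # {"resource_provider_generation": N, "inventories": {...}}

    def put_inventories(session, generation, inventories):
        # The PUT must echo the generation the update was computed against.
        body = {"resource_provider_generation": generation, "inventories": inventories}
        return session.put(
            f"{PLACEMENT}/resource_providers/{RP_UUID}/inventories",
            json=body, headers=HEADERS)

    def set_inventories_with_retry(session, new_inventories, attempts=4):
        for _ in range(attempts):
            current = get_inventories(session)
            resp = put_inventories(
                session, current["resource_provider_generation"], new_inventories)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 "placement.concurrent_update": another writer bumped the generation
            # (143 -> 144 in the log), so refresh and try again.
        raise RuntimeError("gave up after repeated generation conflicts")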
[ 1134.234323] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116157} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.235268] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.236124] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e2c784-24e8-4184-b2a1-9aa20ac036bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.241826] env[68244]: DEBUG nova.compute.manager [req-e40f8d73-a852-4c58-8fe6-a6c8e17b50f0 req-9f5bf305-35cb-4c6d-b738-9ff367b360f6 service nova] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Received event network-vif-deleted-6b35363e-cd67-4df3-a6e4-3fa00355ae06 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1134.257480] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.257480] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquired lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.257642] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.267667] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] d4fd9092-9081-4be0-b33d-c175be24f12e/d4fd9092-9081-4be0-b33d-c175be24f12e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.271956] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07504815-2764-4ac8-afb2-bb11071e5c37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.297292] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 
1134.297292] env[68244]: value = "task-2781100" [ 1134.297292] env[68244]: _type = "Task" [ 1134.297292] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.309225] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781100, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.467093] env[68244]: DEBUG nova.network.neutron [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.633725] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.421s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.634269] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1134.636926] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.831s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.637164] env[68244]: DEBUG nova.objects.instance [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'resources' on Instance uuid cedcff81-0010-4fa6-95bf-72a4dcac5427 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.647565] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f02eab-26f0-76c0-d55a-0565dabcf83e, 'name': SearchDatastore_Task, 'duration_secs': 0.035667} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.649021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.649021] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8d0fa6cd-c14f-49ab-9595-396a10b4639a/8d0fa6cd-c14f-49ab-9595-396a10b4639a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.649195] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2cdfafb-a509-4967-b7d7-7c497e32d3b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.655223] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1134.655223] env[68244]: value = "task-2781101" [ 1134.655223] env[68244]: _type = "Task" [ 1134.655223] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.663189] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781101, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.669438] env[68244]: DEBUG nova.network.neutron [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.694959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.803543] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1134.808882] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781100, 'name': ReconfigVM_Task, 'duration_secs': 0.314071} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.809186] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Reconfigured VM instance instance-0000005f to attach disk [datastore1] d4fd9092-9081-4be0-b33d-c175be24f12e/d4fd9092-9081-4be0-b33d-c175be24f12e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.811042] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29655b10-1a05-40ef-a962-99a065da20df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.817189] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1134.817189] env[68244]: value = "task-2781102" [ 1134.817189] env[68244]: _type = "Task" [ 1134.817189] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.825155] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781102, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.947482] env[68244]: DEBUG nova.network.neutron [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Updating instance_info_cache with network_info: [{"id": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "address": "fa:16:3e:de:36:3a", "network": {"id": "ad478657-594d-4004-b84a-aa37a0ec17f8", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1949449236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4882d6423a046bdb3626c18f06f056c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b3cfeb1-f262-4fd9-b506-8e9c0733e2d8", "external-id": "nsx-vlan-transportzone-119", "segmentation_id": 119, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f33af8-00", "ovs_interfaceid": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.969500] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.139811] env[68244]: DEBUG nova.compute.utils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.144488] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.144696] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1135.165860] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781101, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507922} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.166239] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8d0fa6cd-c14f-49ab-9595-396a10b4639a/8d0fa6cd-c14f-49ab-9595-396a10b4639a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.166518] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.169149] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-328875ac-e681-4f9e-b89b-50956e501991 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.171961] env[68244]: INFO nova.compute.manager [-] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Took 1.41 seconds to deallocate network for instance. [ 1135.178186] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1135.178186] env[68244]: value = "task-2781103" [ 1135.178186] env[68244]: _type = "Task" [ 1135.178186] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.186736] env[68244]: DEBUG nova.policy [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8136fdb17a934648ace23cbae52d7af7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '297edd7cb8934787b815a3230f85b139', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1135.194113] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781103, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.327139] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781102, 'name': Rename_Task, 'duration_secs': 0.162218} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.329727] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.330202] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd8f9c3b-7cf1-4d32-9f4b-27ed7b50592f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.336595] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1135.336595] env[68244]: value = "task-2781104" [ 1135.336595] env[68244]: _type = "Task" [ 1135.336595] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.347665] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.398546] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c0ab16-6841-4d57-829d-fddb06b4d27c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.406741] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01eb3bd4-9184-4796-a182-bfedbff836f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.439429] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7876d25d-8cc7-4452-8d04-7bb054d0e3be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.447599] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3729322d-883a-4bdc-814a-9c8f95343c7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.451800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Releasing lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.452166] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Instance network_info: |[{"id": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "address": "fa:16:3e:de:36:3a", "network": {"id": "ad478657-594d-4004-b84a-aa37a0ec17f8", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1949449236-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4882d6423a046bdb3626c18f06f056c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b3cfeb1-f262-4fd9-b506-8e9c0733e2d8", "external-id": "nsx-vlan-transportzone-119", "segmentation_id": 119, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f33af8-00", "ovs_interfaceid": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1135.452582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:36:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b3cfeb1-f262-4fd9-b506-8e9c0733e2d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46f33af8-00d6-49df-b0a3-3bc05bdff4d1', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.460586] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Creating folder: Project (f4882d6423a046bdb3626c18f06f056c). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1135.461384] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d4ec5f6-7d8b-4a11-9c01-b449487f7034 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.471720] env[68244]: DEBUG nova.compute.provider_tree [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.480710] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf49b08-47d1-42d1-8d86-3c006f63b761 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.483053] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Successfully created port: e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.487385] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Created folder: Project (f4882d6423a046bdb3626c18f06f056c) in parent group-v558876. [ 1135.487568] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Creating folder: Instances. Parent ref: group-v559145. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1135.488444] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59c8dfb4-ad2c-47b7-bf32-1e8c82d5c137 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.492913] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33904d74-1057-4a72-b8c2-d7d6e1ac055a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.499116] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Created folder: Instances in parent group-v559145. [ 1135.499341] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.499523] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.499849] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4195d623-47cc-4713-a7c2-9857d3c42089 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.525259] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.525259] env[68244]: value = "task-2781107" [ 1135.525259] env[68244]: _type = "Task" [ 1135.525259] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.537339] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781107, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.646085] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1135.678033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.690254] env[68244]: DEBUG nova.compute.manager [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Received event network-changed-46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1135.690397] env[68244]: DEBUG nova.compute.manager [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Refreshing instance network info cache due to event network-changed-46f33af8-00d6-49df-b0a3-3bc05bdff4d1. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1135.690598] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] Acquiring lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.690713] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] Acquired lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.690908] env[68244]: DEBUG nova.network.neutron [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Refreshing network info cache for port 46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1135.695459] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072048} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.695899] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.696732] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d9a65f-3adb-4cbb-a11a-f8e2319e4f4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.721128] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 8d0fa6cd-c14f-49ab-9595-396a10b4639a/8d0fa6cd-c14f-49ab-9595-396a10b4639a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.722125] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2366429-182c-47f0-bce7-7029804b2032 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.743030] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1135.743030] env[68244]: value = "task-2781108" [ 1135.743030] env[68244]: _type = "Task" [ 1135.743030] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.751088] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.784701] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.784917] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.848782] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781104, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.974884] env[68244]: DEBUG nova.scheduler.client.report [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.034687] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781107, 'name': CreateVM_Task, 'duration_secs': 0.460696} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.034916] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.035563] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.035750] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.036086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.036341] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f704de1-edba-4db4-8ed1-8d86149e0bb2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.040540] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1136.040540] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520496a7-3697-0f91-d16f-3fb83b525c04" [ 1136.040540] env[68244]: _type = "Task" [ 1136.040540] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.048375] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520496a7-3697-0f91-d16f-3fb83b525c04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.254458] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781108, 'name': ReconfigVM_Task, 'duration_secs': 0.308697} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.254748] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 8d0fa6cd-c14f-49ab-9595-396a10b4639a/8d0fa6cd-c14f-49ab-9595-396a10b4639a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.255506] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd48294e-3332-450a-8b66-ecd0ec00f09d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.263014] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1136.263014] env[68244]: value = "task-2781109" [ 1136.263014] env[68244]: _type = "Task" [ 1136.263014] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.274604] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781109, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.293136] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.293349] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.293516] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.293923] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.293923] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.294053] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.294219] env[68244]: DEBUG nova.compute.manager [None 
req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1136.294477] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.347755] env[68244]: DEBUG oslo_vmware.api [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781104, 'name': PowerOnVM_Task, 'duration_secs': 0.512621} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.348020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.348222] env[68244]: INFO nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1136.348448] env[68244]: DEBUG nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.349212] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b413a8-6840-4376-836f-5d7f1e52228d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.475853] env[68244]: DEBUG nova.network.neutron [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Updated VIF entry in instance network info cache for port 46f33af8-00d6-49df-b0a3-3bc05bdff4d1. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1136.476279] env[68244]: DEBUG nova.network.neutron [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Updating instance_info_cache with network_info: [{"id": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "address": "fa:16:3e:de:36:3a", "network": {"id": "ad478657-594d-4004-b84a-aa37a0ec17f8", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1949449236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4882d6423a046bdb3626c18f06f056c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b3cfeb1-f262-4fd9-b506-8e9c0733e2d8", "external-id": "nsx-vlan-transportzone-119", "segmentation_id": 119, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46f33af8-00", "ovs_interfaceid": "46f33af8-00d6-49df-b0a3-3bc05bdff4d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.481741] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.484045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.789s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.484271] env[68244]: DEBUG nova.objects.instance [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lazy-loading 'resources' on Instance uuid 91422c89-601c-4e5f-b5b0-fa2639031d3e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.504897] env[68244]: INFO nova.scheduler.client.report [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted allocations for instance cedcff81-0010-4fa6-95bf-72a4dcac5427 [ 1136.551913] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520496a7-3697-0f91-d16f-3fb83b525c04, 'name': SearchDatastore_Task, 'duration_secs': 0.015748} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.552238] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.552492] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.552760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.552908] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.553102] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.553911] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ecca4ef-fefa-419b-a316-48fbfa8d1020 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.561975] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.562155] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.563058] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce2e32d4-4c14-46c4-a90a-781e1ceccb3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.568330] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1136.568330] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f96496-4591-e4a1-4c8d-f2208d2ca592" [ 1136.568330] env[68244]: _type = "Task" [ 1136.568330] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.575486] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f96496-4591-e4a1-4c8d-f2208d2ca592, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.603919] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d1d613-00c8-4649-a4f9-7eae56127715 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.625314] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ca09bb-b4a7-4de7-afb5-9619a5b03f19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.631946] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.654546] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1136.687784] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1136.687784] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.687784] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1136.687784] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.688033] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1136.689082] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1136.689082] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1136.689082] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1136.689082] env[68244]: DEBUG nova.virt.hardware [None 
req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1136.689248] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1136.689296] env[68244]: DEBUG nova.virt.hardware [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1136.693197] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c10dd31-6615-4c75-9d23-54b5fa1e212f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.698735] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9870c4-6506-48d9-a39f-c6e0138d704c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.778854] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781109, 'name': Rename_Task, 'duration_secs': 0.289517} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.778854] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.778998] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37f34e38-9003-4205-9076-27c7f43bfa2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.788101] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1136.788101] env[68244]: value = "task-2781110" [ 1136.788101] env[68244]: _type = "Task" [ 1136.788101] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.797194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.797343] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781110, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.870293] env[68244]: INFO nova.compute.manager [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Took 18.14 seconds to build instance. [ 1136.979109] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] Releasing lock "refresh_cache-ae8211ae-82bb-4a69-aa27-e81de2a06abe" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.979403] env[68244]: DEBUG nova.compute.manager [req-8b15c20c-63af-4ecb-9d0f-b02b431c1f22 req-cde7efc4-4b14-4b0d-93df-449b90762cd7 service nova] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Received event network-vif-deleted-f72f005b-3ff1-4910-9fdc-4d4b32362aaf {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1137.005234] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Successfully updated port: e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.013892] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c9419405-40c0-4d99-be3d-de17324ed184 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "cedcff81-0010-4fa6-95bf-72a4dcac5427" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.667s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.082312] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f96496-4591-e4a1-4c8d-f2208d2ca592, 'name': SearchDatastore_Task, 'duration_secs': 0.007844} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.083145] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee55e88d-1301-4fda-b571-6544179579a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.091145] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1137.091145] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5299dfaf-6b5c-2565-64ed-4b0a94c37bc2" [ 1137.091145] env[68244]: _type = "Task" [ 1137.091145] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.099514] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5299dfaf-6b5c-2565-64ed-4b0a94c37bc2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.142332] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.142657] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-468e1536-1619-4d5c-8329-217b86136f0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.151516] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1137.151516] env[68244]: value = "task-2781111" [ 1137.151516] env[68244]: _type = "Task" [ 1137.151516] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.160379] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.258452] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811c2c24-49d4-4a37-9de5-d3cbe4f2a44d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.266656] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a867721d-ad8e-4ada-a6a3-d2c412608b6e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.273701] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "interface-d4fd9092-9081-4be0-b33d-c175be24f12e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.273989] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "interface-d4fd9092-9081-4be0-b33d-c175be24f12e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.274380] env[68244]: DEBUG nova.objects.instance [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lazy-loading 'flavor' on Instance uuid d4fd9092-9081-4be0-b33d-c175be24f12e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.305057] env[68244]: DEBUG nova.objects.instance [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 
tempest-AttachInterfacesV270Test-1798585359-project-member] Lazy-loading 'pci_requests' on Instance uuid d4fd9092-9081-4be0-b33d-c175be24f12e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.310124] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ddf235-ba45-4d47-ac8a-5bcfb4b7654a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.316039] env[68244]: DEBUG nova.objects.base [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1137.316266] env[68244]: DEBUG nova.network.neutron [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.325629] env[68244]: DEBUG oslo_vmware.api [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781110, 'name': PowerOnVM_Task, 'duration_secs': 0.473212} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.327926] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.328196] env[68244]: INFO nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Took 7.34 seconds to spawn the instance on the hypervisor. 
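The SearchDatastore_Task, Rename_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: invoke a vSphere API method that returns a Task managed object, then poll that task until it reports success or an error. A minimal Python sketch of that pattern, assuming placeholder vCenter credentials and a hypothetical VM moref (the real values come from nova.conf and from the PropertyCollector queries interleaved in this log):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; Nova reads the real ones from the
    # [vmware] section of nova.conf when it creates its API session.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call; *_Task methods return a Task moref.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls TaskInfo every task_poll_interval seconds and
    # raises if the task ends in an error state; those polls are what emit
    # the "Task: {...} progress is N%" DEBUG records seen here.
    session.wait_for_task(task)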
[ 1137.328326] env[68244]: DEBUG nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.329418] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61542639-55f7-4d27-a003-02acfe287ead {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.333989] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82ae9b4-2b85-4c5d-a406-d440bdc3643e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.352766] env[68244]: DEBUG nova.compute.provider_tree [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.372483] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4a0c443e-238f-49fc-9562-5d4614e73417 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.655s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.419687] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cea638b5-0b88-47b8-b83c-70c69a0321bc tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "interface-d4fd9092-9081-4be0-b33d-c175be24f12e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 0.146s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.507891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.508316] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.508316] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.602522] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': 
session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5299dfaf-6b5c-2565-64ed-4b0a94c37bc2, 'name': SearchDatastore_Task, 'duration_secs': 0.00872} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.602867] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.603205] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ae8211ae-82bb-4a69-aa27-e81de2a06abe/ae8211ae-82bb-4a69-aa27-e81de2a06abe.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1137.603522] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe7b4579-816f-412d-bc03-35341e17f534 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.611081] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1137.611081] env[68244]: value = "task-2781113" [ 1137.611081] env[68244]: _type = "Task" [ 1137.611081] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.618645] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.663018] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781111, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.790395] env[68244]: DEBUG nova.compute.manager [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Received event network-vif-plugged-e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1137.790395] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.790578] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.790837] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.791047] env[68244]: DEBUG nova.compute.manager [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] No waiting events found dispatching network-vif-plugged-e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1137.791272] env[68244]: WARNING nova.compute.manager [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Received unexpected event network-vif-plugged-e73356d6-fa2a-49f0-b862-b5f1644c7579 for instance with vm_state building and task_state spawning. [ 1137.791426] env[68244]: DEBUG nova.compute.manager [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Received event network-changed-e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1137.791581] env[68244]: DEBUG nova.compute.manager [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Refreshing instance network info cache due to event network-changed-e73356d6-fa2a-49f0-b862-b5f1644c7579. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1137.791747] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Acquiring lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.860085] env[68244]: DEBUG nova.scheduler.client.report [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.870515] env[68244]: INFO nova.compute.manager [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Took 18.43 seconds to build instance. [ 1138.041587] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1138.121538] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781113, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.163275] env[68244]: DEBUG oslo_vmware.api [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781111, 'name': PowerOnVM_Task, 'duration_secs': 0.56354} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.163536] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.163718] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc7ac35-d55c-42cd-8b48-dd05345fa209 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance 'df935885-c313-473d-aa3a-ba81aa999554' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.214015] env[68244]: DEBUG nova.network.neutron [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.369194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.372909] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.695s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.373291] env[68244]: DEBUG nova.objects.instance [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lazy-loading 'resources' on Instance uuid fc75039c-f2d0-4d4b-9a82-b605b6ba63d5 
{{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.375596] env[68244]: DEBUG oslo_concurrency.lockutils [None req-422c85d7-a31a-481e-9adb-1d05265e4a13 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.948s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.403313] env[68244]: INFO nova.scheduler.client.report [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleted allocations for instance 91422c89-601c-4e5f-b5b0-fa2639031d3e [ 1138.623494] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.952011} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.623870] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ae8211ae-82bb-4a69-aa27-e81de2a06abe/ae8211ae-82bb-4a69-aa27-e81de2a06abe.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.623956] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.624214] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7e57203-ba6e-44d6-97b6-89749dc4837a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.631562] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1138.631562] env[68244]: value = "task-2781114" [ 1138.631562] env[68244]: _type = "Task" [ 1138.631562] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.639063] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781114, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.707093] env[68244]: DEBUG oslo_concurrency.lockutils [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.707355] env[68244]: DEBUG oslo_concurrency.lockutils [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.707623] env[68244]: DEBUG nova.compute.manager [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.708478] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead774c1-e149-4786-9aea-bac920faab85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.715087] env[68244]: DEBUG nova.compute.manager [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1138.715662] env[68244]: DEBUG nova.objects.instance [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'flavor' on Instance uuid 8d0fa6cd-c14f-49ab-9595-396a10b4639a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.717064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.717338] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Instance network_info: |[{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.718023] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Acquired lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.718023] env[68244]: DEBUG nova.network.neutron [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Refreshing network info cache for port e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.719292] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:62:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e73356d6-fa2a-49f0-b862-b5f1644c7579', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.726894] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Creating folder: Project (297edd7cb8934787b815a3230f85b139). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1138.728365] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adf40488-b6ab-4697-bca5-17487674ddc7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.738516] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Created folder: Project (297edd7cb8934787b815a3230f85b139) in parent group-v558876. [ 1138.738709] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Creating folder: Instances. Parent ref: group-v559148. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1138.739232] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2ff9c9e-17da-44c1-9e5b-d0e2cac35478 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.748133] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Created folder: Instances in parent group-v559148. [ 1138.748383] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.748574] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.748775] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15e01727-383f-4328-bb1a-a65e115eb879 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.767174] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.767174] env[68244]: value = "task-2781117" [ 1138.767174] env[68244]: _type = "Task" [ 1138.767174] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.774623] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781117, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.913507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-aca6270c-1bc4-4f36-821e-503828829b99 tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "91422c89-601c-4e5f-b5b0-fa2639031d3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.042s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.103187] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f35bea9-abbf-4f3d-b911-323f37036e83 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.113087] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4c9e64-810d-49dc-89f0-72ee2048e58d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.149381] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8321b1-3c09-418c-8ccb-b9fca7231a67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.157206] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073209} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.159348] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.160239] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed028627-3932-4f42-a7c6-7a293d744176 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.163659] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3491a51-25ab-42ca-aa28-482269743505 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.188795] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] ae8211ae-82bb-4a69-aa27-e81de2a06abe/ae8211ae-82bb-4a69-aa27-e81de2a06abe.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.196858] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f85229d-ef2f-4835-89bb-8461baf04b5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.212196] env[68244]: DEBUG nova.compute.provider_tree [None 
req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.226561] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1139.226561] env[68244]: value = "task-2781118" [ 1139.226561] env[68244]: _type = "Task" [ 1139.226561] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.243779] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781118, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.283257] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781117, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.373830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "d4fd9092-9081-4be0-b33d-c175be24f12e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.374168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.374401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.374611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.374817] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.377597] env[68244]: INFO nova.compute.manager [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Terminating instance [ 1139.462046] env[68244]: DEBUG nova.network.neutron [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updated VIF entry in instance network info cache for port e73356d6-fa2a-49f0-b862-b5f1644c7579. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.462436] env[68244]: DEBUG nova.network.neutron [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.721719] env[68244]: DEBUG nova.scheduler.client.report [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.731246] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.731486] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.731793] env[68244]: DEBUG nova.objects.instance [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 788e77e1-a356-4342-9ff3-5ad13868fd77 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.747295] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.747583] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.747815] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-195d4ee7-00ed-4355-89ca-69c081c0057e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.755597] env[68244]: DEBUG oslo_vmware.api [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1139.755597] env[68244]: value = "task-2781119" [ 1139.755597] env[68244]: _type = "Task" [ 1139.755597] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.767319] env[68244]: DEBUG oslo_vmware.api [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.779965] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781117, 'name': CreateVM_Task, 'duration_secs': 0.898748} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.780244] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.781179] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.781428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.781866] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.782247] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af8f7f4a-c482-4da4-9071-3a3895a694d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.787999] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1139.787999] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cd1074-dce2-c84d-7f23-8fb3624d6f79" [ 1139.787999] env[68244]: _type = "Task" [ 1139.787999] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.799118] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cd1074-dce2-c84d-7f23-8fb3624d6f79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.893043] env[68244]: DEBUG nova.compute.manager [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1139.893208] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1139.894165] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b5394d-bb60-448c-aacd-013008c5d5bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.902584] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.902807] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9ea059d-50f9-47d4-9a76-80abbe6a3df5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.909246] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1139.909246] env[68244]: value = "task-2781120" [ 1139.909246] env[68244]: _type = "Task" [ 1139.909246] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.917731] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.965700] env[68244]: DEBUG oslo_concurrency.lockutils [req-cb89c589-524a-4dd0-8652-6a4d0099658f req-954ced6c-8ba7-4e7b-941f-3f05cd04da2c service nova] Releasing lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.039276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.039546] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.039754] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.039960] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.040154] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.042303] env[68244]: INFO nova.compute.manager [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Terminating instance [ 1140.234766] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.237277] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.440s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.237487] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.237642] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1140.241494] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d50356-fc15-4b71-b398-db172eb3642d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.252817] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781118, 'name': ReconfigVM_Task, 'duration_secs': 0.767695} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.255605] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Reconfigured VM instance instance-00000061 to attach disk [datastore2] ae8211ae-82bb-4a69-aa27-e81de2a06abe/ae8211ae-82bb-4a69-aa27-e81de2a06abe.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.255779] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9e42c8d-26dc-4629-b408-630248348dea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.258634] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a05e5d-a0f6-45f9-ab29-ad469bc2536c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.267091] env[68244]: INFO nova.scheduler.client.report [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Deleted allocations for instance fc75039c-f2d0-4d4b-9a82-b605b6ba63d5 [ 1140.280690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edef99dd-ef4c-41c6-a344-38d62382691b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.283227] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1140.283227] env[68244]: value = "task-2781121" [ 1140.283227] env[68244]: _type = "Task" [ 1140.283227] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.286595] env[68244]: DEBUG oslo_vmware.api [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781119, 'name': PowerOffVM_Task, 'duration_secs': 0.224427} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.290620] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.290770] env[68244]: DEBUG nova.compute.manager [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.293341] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa3a6cf-86dd-42cb-9dfe-486c4c1bc457 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.300026] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358227e8-4ddb-4588-bb3d-9b6aec50d864 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.308533] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781121, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.341942] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178870MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1140.342053] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.342269] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.343979] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cd1074-dce2-c84d-7f23-8fb3624d6f79, 'name': SearchDatastore_Task, 'duration_secs': 0.01616} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.344747] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.348000] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.348000] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.348000] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.348000] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.348000] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f37c211-5a5e-49f3-99e1-d081200e3f53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.348549] env[68244]: DEBUG nova.objects.instance [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 788e77e1-a356-4342-9ff3-5ad13868fd77 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.358877] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.358877] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.358877] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-591c6e17-b466-4c77-bfaf-afa553d5cc95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.367520] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1140.367520] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a77624-d020-048f-f6d5-db030265de66" [ 1140.367520] env[68244]: _type = "Task" [ 1140.367520] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.375811] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a77624-d020-048f-f6d5-db030265de66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.419889] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781120, 'name': PowerOffVM_Task, 'duration_secs': 0.261113} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.420194] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.420504] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1140.420761] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e07aa1f9-c53d-4c91-960a-dbbc10891f85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.485477] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1140.485477] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1140.485805] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Deleting the datastore file [datastore1] d4fd9092-9081-4be0-b33d-c175be24f12e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.486197] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de9b7635-e718-4fba-aec5-8f09b9529e35 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.493638] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for the task: (returnval){ [ 1140.493638] env[68244]: value = "task-2781123" [ 1140.493638] env[68244]: _type = "Task" [ 1140.493638] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.501494] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.546688] env[68244]: DEBUG nova.compute.manager [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1140.546688] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1140.546688] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489c1ebb-6e22-4c5a-ba18-d5bd76ce4c18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.555066] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.555391] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d195488-7729-4224-bafe-4ac5d360ca91 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.561251] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1140.561251] env[68244]: value = "task-2781124" [ 1140.561251] env[68244]: _type = "Task" [ 1140.561251] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.569568] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.793331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a2ca081c-cd2d-4a45-afc8-2290f024b633 tempest-ServersTestJSON-1929471806 tempest-ServersTestJSON-1929471806-project-member] Lock "fc75039c-f2d0-4d4b-9a82-b605b6ba63d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.227s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.801182] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781121, 'name': Rename_Task, 'duration_secs': 0.174945} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.801182] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.801365] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38c7cb29-ee57-4a2d-a9d6-f29e13cd4741 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.809029] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1140.809029] env[68244]: value = "task-2781125" [ 1140.809029] env[68244]: _type = "Task" [ 1140.809029] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.817104] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781125, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.855444] env[68244]: DEBUG oslo_concurrency.lockutils [None req-221afcc3-cb7c-464b-96a7-2881dcfb42d2 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.147s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.856025] env[68244]: DEBUG nova.objects.base [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<788e77e1-a356-4342-9ff3-5ad13868fd77> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1140.856222] env[68244]: DEBUG nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1140.880238] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a77624-d020-048f-f6d5-db030265de66, 'name': SearchDatastore_Task, 'duration_secs': 0.01191} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.881122] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea21eb09-f235-4c8a-bc17-6c6090a858e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.886860] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1140.886860] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5264d236-0772-ce91-3492-2798871580aa" [ 1140.886860] env[68244]: _type = "Task" [ 1140.886860] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.895081] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5264d236-0772-ce91-3492-2798871580aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.902318] env[68244]: DEBUG nova.policy [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1141.006030] env[68244]: DEBUG oslo_vmware.api [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Task: {'id': task-2781123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364954} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.006266] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.006518] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.006768] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.007192] env[68244]: INFO nova.compute.manager [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1141.007580] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.009904] env[68244]: DEBUG nova.compute.manager [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1141.010061] env[68244]: DEBUG nova.network.neutron [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1141.072850] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781124, 'name': PowerOffVM_Task, 'duration_secs': 0.394393} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.073131] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.073330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.073562] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f3e26d2-c4ff-43b2-9be9-3ef5ebe5bedd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.158215] env[68244]: DEBUG nova.network.neutron [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Port 9389f00b-7d76-4743-9f6d-d9af08918ce6 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1141.158555] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.158738] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.158916] env[68244]: DEBUG nova.network.neutron [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.200786] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.201016] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.201215] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleting the datastore file [datastore2] a50d505f-92f2-4759-ab8f-1bf4c9708b1a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.201498] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03800df8-f835-450d-8050-ee67b480ccf0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.207623] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for the task: (returnval){ [ 1141.207623] env[68244]: value = "task-2781127" [ 1141.207623] env[68244]: _type = "Task" [ 1141.207623] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.215886] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.254542] env[68244]: DEBUG nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Successfully created port: e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1141.319185] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781125, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.359749] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance df935885-c313-473d-aa3a-ba81aa999554 as it has an incoming, in-progress migration fdae44ec-31ae-4e7f-8b05-aba60785fd58. 
Migration status is reverting {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1141.362010] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating resource usage from migration fdae44ec-31ae-4e7f-8b05-aba60785fd58 [ 1141.383066] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 2aacd21f-d664-4267-8331-d3862f43d35b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383204] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383403] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7778c027-d4af-436c-a545-aa513c0b1127 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383480] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383587] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383720] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a50d505f-92f2-4759-ab8f-1bf4c9708b1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383856] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d46f6695-7a96-4e0b-b43a-236bcb4ec519 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.383980] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 788e77e1-a356-4342-9ff3-5ad13868fd77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.384228] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.384324] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 10e67250-5ddc-430d-aac7-4e6bae0778e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.384437] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration fdae44ec-31ae-4e7f-8b05-aba60785fd58 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1141.384549] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance df935885-c313-473d-aa3a-ba81aa999554 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.384659] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d4fd9092-9081-4be0-b33d-c175be24f12e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.384761] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8d0fa6cd-c14f-49ab-9595-396a10b4639a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.385046] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ae8211ae-82bb-4a69-aa27-e81de2a06abe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.385046] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1141.385408] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1141.385471] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1141.398615] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5264d236-0772-ce91-3492-2798871580aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01742} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.398863] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.399132] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] dfe017bb-d860-4da6-abe5-7e8d7a7dd05a/dfe017bb-d860-4da6-abe5-7e8d7a7dd05a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.399817] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f151e32c-13a5-4c05-a020-6f3d45e846aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.406871] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1141.406871] env[68244]: value = "task-2781128" [ 1141.406871] env[68244]: _type = "Task" [ 1141.406871] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.414858] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781128, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.447058] env[68244]: DEBUG nova.compute.manager [req-ec705e53-c6e2-48de-8a75-5e9465144c5b req-6fdf68e2-de25-4bfa-bc85-2264723ff1c9 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Received event network-vif-deleted-6809cc9e-9900-43c7-850f-f17a020b6c34 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1141.447058] env[68244]: INFO nova.compute.manager [req-ec705e53-c6e2-48de-8a75-5e9465144c5b req-6fdf68e2-de25-4bfa-bc85-2264723ff1c9 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Neutron deleted interface 6809cc9e-9900-43c7-850f-f17a020b6c34; detaching it from the instance and deleting it from the info cache [ 1141.447058] env[68244]: DEBUG nova.network.neutron [req-ec705e53-c6e2-48de-8a75-5e9465144c5b req-6fdf68e2-de25-4bfa-bc85-2264723ff1c9 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.655479] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a693bdc-6f6d-4ea7-8d13-29d1ce74c2ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.667063] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103432be-af47-4178-a17c-cb5eee0666f9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.429990] env[68244]: DEBUG nova.network.neutron [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "7778c027-d4af-436c-a545-aa513c0b1127" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "7778c027-d4af-436c-a545-aa513c0b1127-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.435027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.438134] env[68244]: INFO nova.compute.manager [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Terminating instance [ 1142.447035] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a128383-96c3-407f-a81c-1ae5966db0a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1142.448979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f935bb0-643b-46d4-9d1f-cf527f9b604d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.452023] env[68244]: INFO nova.compute.manager [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Terminating instance [ 1142.460810] env[68244]: DEBUG oslo_vmware.api [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Task: {'id': task-2781127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343359} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.469198] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.470121] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.470121] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.470121] env[68244]: INFO nova.compute.manager [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Took 1.92 seconds to destroy the instance on the hypervisor. [ 1142.470121] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.470403] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680499} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.470694] env[68244]: DEBUG oslo_vmware.api [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781125, 'name': PowerOnVM_Task, 'duration_secs': 0.749264} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.470881] env[68244]: DEBUG nova.compute.manager [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1142.470973] env[68244]: DEBUG nova.network.neutron [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1142.472718] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] dfe017bb-d860-4da6-abe5-7e8d7a7dd05a/dfe017bb-d860-4da6-abe5-7e8d7a7dd05a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.472925] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.476745] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91764faf-9760-480f-91ac-8922a9c3c165 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.486747] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b3e8d2-7fdb-4207-823f-68420310acbb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.490502] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1142.490631] env[68244]: INFO nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Took 9.06 seconds to spawn the instance on the hypervisor. 
[ 1142.490776] env[68244]: DEBUG nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1142.491039] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fac5a78d-9040-4f07-ab5d-0f68ba168887 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.493309] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f61baa4-c047-44a8-9172-f6c0446aebfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.507272] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.525670] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1142.525670] env[68244]: value = "task-2781129" [ 1142.525670] env[68244]: _type = "Task" [ 1142.525670] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.526628] env[68244]: DEBUG nova.compute.manager [req-ec705e53-c6e2-48de-8a75-5e9465144c5b req-6fdf68e2-de25-4bfa-bc85-2264723ff1c9 service nova] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Detach interface failed, port_id=6809cc9e-9900-43c7-850f-f17a020b6c34, reason: Instance d4fd9092-9081-4be0-b33d-c175be24f12e could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1142.533031] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.546210] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781129, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.791727] env[68244]: DEBUG nova.compute.manager [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-vif-plugged-e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1142.791948] env[68244]: DEBUG oslo_concurrency.lockutils [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.793056] env[68244]: DEBUG oslo_concurrency.lockutils [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.793056] env[68244]: DEBUG oslo_concurrency.lockutils [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.793056] env[68244]: DEBUG nova.compute.manager [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] No waiting events found dispatching network-vif-plugged-e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1142.793056] env[68244]: WARNING nova.compute.manager [req-7684a551-676d-49bb-87ae-aa107655527d req-fc584213-1ec6-4024-9c6b-1d0cc334b7d8 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received unexpected event network-vif-plugged-e701e429-b2ba-44bd-a482-df72b133b5fc for instance with vm_state active and task_state None. 
[ 1142.830409] env[68244]: DEBUG nova.network.neutron [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.918334] env[68244]: DEBUG nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Successfully updated port: e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1142.941626] env[68244]: INFO nova.compute.manager [-] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Took 1.93 seconds to deallocate network for instance. [ 1142.959246] env[68244]: DEBUG nova.compute.manager [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.959246] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.959246] env[68244]: DEBUG nova.compute.manager [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.959246] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.959246] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e1614e-2624-4e9c-8f11-6d1fe69bcd67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.962552] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d91800-a549-4dd7-a2f9-13334280cf43 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.974023] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.974023] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.974023] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69ebf9de-72c8-4256-8c0d-7c04cec3378f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.975447] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7395124-2ee3-42fa-a673-00b76b1621d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.985051] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1142.985051] env[68244]: value = "task-2781131" [ 1142.985051] env[68244]: _type = "Task" [ 1142.985051] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.991550] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781131, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.038835] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1143.038835] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.696s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.045236] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.045441] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.045621] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 8d0fa6cd-c14f-49ab-9595-396a10b4639a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.046083] env[68244]: INFO nova.compute.manager [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Took 17.39 seconds to build instance. [ 1143.046894] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b6eba36-f6a0-45f7-a61c-3e3d1e222ee3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.052015] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070254} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.052594] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1143.053387] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d38abc6-fcf3-468d-9b53-ab6f57cfb6ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.057198] env[68244]: DEBUG oslo_vmware.api [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1143.057198] env[68244]: value = "task-2781132" [ 1143.057198] env[68244]: _type = "Task" [ 1143.057198] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.078572] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] dfe017bb-d860-4da6-abe5-7e8d7a7dd05a/dfe017bb-d860-4da6-abe5-7e8d7a7dd05a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.079870] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2844a0fb-c1bc-4a8a-ad06-2cd660a66bdc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.097553] env[68244]: DEBUG oslo_vmware.api [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.102780] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1143.102780] env[68244]: value = "task-2781133" [ 1143.102780] env[68244]: _type = "Task" [ 1143.102780] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.111732] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781133, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.332804] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.370222] env[68244]: DEBUG nova.network.neutron [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.419552] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.419766] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.419901] env[68244]: DEBUG nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1143.446778] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.447933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.447933] env[68244]: DEBUG nova.objects.instance [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lazy-loading 'resources' on Instance uuid d4fd9092-9081-4be0-b33d-c175be24f12e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.492621] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781131, 'name': PowerOffVM_Task, 'duration_secs': 0.26855} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.492621] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.492999] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1143.492999] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efe863b1-ac63-4cfb-b24d-3f18cc8de781 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.549953] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e682cc21-6136-42fd-80ad-ed7e7e9a7703 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.903s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.567708] env[68244]: DEBUG oslo_vmware.api [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278089} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.567979] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1143.568181] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1143.570161] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1143.570161] env[68244]: INFO nova.compute.manager [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1143.570161] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.570161] env[68244]: DEBUG nova.compute.manager [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1143.570161] env[68244]: DEBUG nova.network.neutron [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1143.572298] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.572512] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.572663] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleting the datastore file [datastore2] 7778c027-d4af-436c-a545-aa513c0b1127 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.573252] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef585b60-0940-4bef-b665-ac253b665f88 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.580052] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1143.580052] env[68244]: value = "task-2781135" [ 1143.580052] env[68244]: _type = "Task" [ 1143.580052] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.588642] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.612769] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781133, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.836680] env[68244]: DEBUG nova.compute.manager [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68244) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1143.878640] env[68244]: INFO nova.compute.manager [-] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Took 1.41 seconds to deallocate network for instance. [ 1143.957420] env[68244]: WARNING nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. ignoring it [ 1144.097131] env[68244]: DEBUG oslo_vmware.api [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.493645} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.097131] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.097281] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.097457] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.097629] env[68244]: INFO nova.compute.manager [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1144.097863] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.098263] env[68244]: DEBUG nova.compute.manager [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1144.098393] env[68244]: DEBUG nova.network.neutron [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1144.115303] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781133, 'name': ReconfigVM_Task, 'duration_secs': 0.723027} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.118065] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfigured VM instance instance-00000062 to attach disk [datastore2] dfe017bb-d860-4da6-abe5-7e8d7a7dd05a/dfe017bb-d860-4da6-abe5-7e8d7a7dd05a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.118994] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e8c1c57-c121-4589-911c-811b5b411ef7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.125093] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1144.125093] env[68244]: value = "task-2781136" [ 1144.125093] env[68244]: _type = "Task" [ 1144.125093] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.134728] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781136, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.217540] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d971a8-4d97-4acf-a16b-1c9b87ff5909 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.230959] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6886dba2-1d6c-4763-97e7-c9212142bbd4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.268014] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baff8e6b-00c5-4e2f-841d-d91c4d92d146 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.279059] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff6ff38-5294-4528-9c5d-d90c1b220149 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.295800] env[68244]: DEBUG nova.compute.provider_tree [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.297324] env[68244]: DEBUG nova.network.neutron [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.387025] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.413834] env[68244]: DEBUG nova.network.neutron [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e701e429-b2ba-44bd-a482-df72b133b5fc", "address": "fa:16:3e:bc:1d:12", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape701e429-b2", "ovs_interfaceid": "e701e429-b2ba-44bd-a482-df72b133b5fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.635206] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781136, 'name': Rename_Task, 'duration_secs': 0.186267} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.635911] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1144.636330] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0357872-b641-4ba9-aa7a-d0ba0a39950c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.642670] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1144.642670] env[68244]: value = "task-2781137" [ 1144.642670] env[68244]: _type = "Task" [ 1144.642670] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.651945] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781137, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.723668] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.723923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.724149] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.724641] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.724641] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.726660] env[68244]: INFO nova.compute.manager [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Terminating instance [ 1144.799826] env[68244]: DEBUG nova.scheduler.client.report [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.803314] env[68244]: INFO nova.compute.manager [-] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Took 1.23 seconds to deallocate network for instance. 
[ 1144.916754] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.919370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.919370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.919370] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6743a5-605e-444a-9f08-7416d91c304f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.935814] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1144.936049] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1144.936204] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1144.936384] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1144.936524] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1144.936801] env[68244]: DEBUG nova.virt.hardware 
[None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1144.936872] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1144.937019] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1144.937818] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1144.937818] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1144.937818] env[68244]: DEBUG nova.virt.hardware [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1144.943895] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfiguring VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1144.944225] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1114efa7-d0d2-4fd3-80cd-d1d2b11a5648 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.962109] env[68244]: DEBUG oslo_vmware.api [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1144.962109] env[68244]: value = "task-2781138" [ 1144.962109] env[68244]: _type = "Task" [ 1144.962109] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.969783] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.970243] env[68244]: DEBUG oslo_vmware.api [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781138, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.005281] env[68244]: DEBUG nova.network.neutron [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.078211] env[68244]: DEBUG nova.compute.manager [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Received event network-vif-deleted-c01b444b-a422-42c6-8ac3-cf0ce71f1d20 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1145.078682] env[68244]: DEBUG nova.compute.manager [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-changed-e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1145.078887] env[68244]: DEBUG nova.compute.manager [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing instance network info cache due to event network-changed-e701e429-b2ba-44bd-a482-df72b133b5fc. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1145.079459] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.079624] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.079786] env[68244]: DEBUG nova.network.neutron [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Refreshing network info cache for port e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.154170] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781137, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.230738] env[68244]: DEBUG nova.compute.manager [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1145.230918] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1145.231876] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ebf704-b863-4a71-9a8a-025a847da83d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.240134] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.240478] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7a85704-eee9-46bc-aa09-390e44716d98 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.250684] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1145.250684] env[68244]: value = "task-2781139" [ 1145.250684] env[68244]: _type = "Task" [ 1145.250684] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.258654] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781139, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.307862] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.310628] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.925s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.311089] env[68244]: DEBUG nova.objects.instance [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lazy-loading 'resources' on Instance uuid a50d505f-92f2-4759-ab8f-1bf4c9708b1a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.312620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.330555] env[68244]: INFO nova.scheduler.client.report [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Deleted allocations for instance d4fd9092-9081-4be0-b33d-c175be24f12e [ 1145.473764] env[68244]: DEBUG oslo_vmware.api [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781138, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.508603] env[68244]: INFO nova.compute.manager [-] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Took 1.41 seconds to deallocate network for instance. [ 1145.654594] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781137, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.763245] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781139, 'name': PowerOffVM_Task, 'duration_secs': 0.319685} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.763582] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1145.763681] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1145.763923] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f54bf586-9f3c-42f7-9a1f-70528e5fc82d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.806429] env[68244]: DEBUG nova.network.neutron [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updated VIF entry in instance network info cache for port e701e429-b2ba-44bd-a482-df72b133b5fc. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.806985] env[68244]: DEBUG nova.network.neutron [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e701e429-b2ba-44bd-a482-df72b133b5fc", "address": "fa:16:3e:bc:1d:12", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape701e429-b2", "ovs_interfaceid": "e701e429-b2ba-44bd-a482-df72b133b5fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.839380] env[68244]: DEBUG oslo_concurrency.lockutils [None req-95f85e3b-e84f-4f4c-9203-c451cb107b78 tempest-AttachInterfacesV270Test-1798585359 tempest-AttachInterfacesV270Test-1798585359-project-member] Lock "d4fd9092-9081-4be0-b33d-c175be24f12e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.465s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.843793] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1145.844190] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1145.844190] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Deleting the datastore file [datastore2] ae8211ae-82bb-4a69-aa27-e81de2a06abe {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1145.844945] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d579c079-cf51-4af5-ad83-f38df255affc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.851896] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for the task: (returnval){ [ 1145.851896] env[68244]: value = "task-2781141" [ 1145.851896] env[68244]: _type = "Task" [ 1145.851896] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.862714] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781141, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.973138] env[68244]: DEBUG oslo_vmware.api [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781138, 'name': ReconfigVM_Task, 'duration_secs': 0.588163} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.973617] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.974367] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfigured VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1146.016246] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.046834] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c62e41-8c0a-4930-bcb7-82cdf21da0c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.054614] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4be3d3-5586-4acc-9465-b088c5bc4cc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.062041] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.062171] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.089729] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4177665-42de-45dc-b5f5-4a2840abf24c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.098275] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-99d3285b-c19a-477a-a26c-4d2ca25117d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.114535] env[68244]: DEBUG nova.compute.provider_tree [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.155012] env[68244]: DEBUG oslo_vmware.api [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781137, 'name': PowerOnVM_Task, 'duration_secs': 1.358477} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.155012] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.155012] env[68244]: INFO nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1146.155242] env[68244]: DEBUG nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.155908] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30024a79-6570-45f7-a484-3c7b3f1d7c44 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.310188] env[68244]: DEBUG oslo_concurrency.lockutils [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.310637] env[68244]: DEBUG nova.compute.manager [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Received event network-vif-deleted-308e90b2-5cb3-40d0-a9cc-7a2cfb73c7b9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1146.310830] env[68244]: DEBUG nova.compute.manager [req-4bc00ba8-9776-4913-a27c-b6c80609e177 req-1b213909-a4ba-41d8-95b0-3507a132509d service nova] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Received event network-vif-deleted-a3b7e2c4-a30c-49b8-bbb5-2a4ec7a6d69a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1146.359326] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.359700] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.368698] env[68244]: DEBUG oslo_vmware.api [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Task: {'id': task-2781141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434931} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.369121] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.369417] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1146.369715] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1146.369997] env[68244]: INFO nova.compute.manager [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1146.370389] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1146.370686] env[68244]: DEBUG nova.compute.manager [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1146.370842] env[68244]: DEBUG nova.network.neutron [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1146.481669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-004753c5-580c-40b7-9a0c-a4db2704bc45 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.750s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.564476] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1146.620139] env[68244]: DEBUG nova.scheduler.client.report [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.673922] env[68244]: INFO nova.compute.manager [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Took 15.48 seconds to build instance. [ 1146.864370] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1147.088818] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.128292] env[68244]: DEBUG oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.129382] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.159s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.150738] env[68244]: INFO nova.scheduler.client.report [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Deleted allocations for instance a50d505f-92f2-4759-ab8f-1bf4c9708b1a [ 1147.177589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4ef8db54-11dc-442b-b9df-dfabeeeb3c28 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.993s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.258435] env[68244]: DEBUG nova.network.neutron [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.377409] env[68244]: DEBUG nova.compute.manager [req-7a9fe795-999e-4da7-ba95-7e9c99ce52ff req-b05603d4-28c9-4f68-a392-34866f6555ee service nova] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Received event network-vif-deleted-46f33af8-00d6-49df-b0a3-3bc05bdff4d1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1147.390654] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.636711] env[68244]: DEBUG nova.objects.instance [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'migration_context' on Instance uuid df935885-c313-473d-aa3a-ba81aa999554 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.659233] env[68244]: DEBUG 
oslo_concurrency.lockutils [None req-da83a5ff-f0c1-44f5-b67a-4603c0f07b6b tempest-ServerRescueTestJSON-2081540688 tempest-ServerRescueTestJSON-2081540688-project-member] Lock "a50d505f-92f2-4759-ab8f-1bf4c9708b1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.617s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.763572] env[68244]: INFO nova.compute.manager [-] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Took 1.39 seconds to deallocate network for instance. [ 1148.164404] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-e701e429-b2ba-44bd-a482-df72b133b5fc" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.164873] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-e701e429-b2ba-44bd-a482-df72b133b5fc" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.270900] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.401890] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63937cf0-3c3b-4580-a3a9-82c3ad04a77e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.410350] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71905222-7d3d-48de-973d-69966703416a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.441791] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76275b6-61eb-473c-b1c4-0aa9752ba505 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.450030] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cc7ed8-4470-47b4-9d40-f5dbcf70f32a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.463131] env[68244]: DEBUG nova.compute.provider_tree [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.668789] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.669075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.670099] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6f651a-3d0e-451d-8da2-ced171f431b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.698944] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7b5817-1f12-4b53-832c-3dc4558e0080 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.731296] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfiguring VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1148.732025] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0e159ab-f41f-49ce-ab0b-7c095b6e2681 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.754438] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1148.754438] env[68244]: value = "task-2781142" [ 1148.754438] env[68244]: _type = "Task" [ 1148.754438] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.763717] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.966798] env[68244]: DEBUG nova.scheduler.client.report [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1149.267828] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.429105] env[68244]: DEBUG nova.compute.manager [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Received event network-changed-e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1149.429313] env[68244]: DEBUG nova.compute.manager [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Refreshing instance network info cache due to event network-changed-e73356d6-fa2a-49f0-b862-b5f1644c7579. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1149.429528] env[68244]: DEBUG oslo_concurrency.lockutils [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] Acquiring lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.429673] env[68244]: DEBUG oslo_concurrency.lockutils [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] Acquired lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.429836] env[68244]: DEBUG nova.network.neutron [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Refreshing network info cache for port e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.768128] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.978923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.850s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.985574] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.673s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.985652] env[68244]: DEBUG nova.objects.instance [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 8d0fa6cd-c14f-49ab-9595-396a10b4639a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.266979] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.273386] env[68244]: DEBUG nova.network.neutron [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updated VIF entry in instance network info cache for port e73356d6-fa2a-49f0-b862-b5f1644c7579. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.273833] env[68244]: DEBUG nova.network.neutron [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.766737] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.769800] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82bea62-a81c-4185-a295-af9959feb802 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.776220] env[68244]: DEBUG oslo_concurrency.lockutils [req-98a091bd-4b88-4a0a-be61-1cb149c4a776 req-ce505b5d-d502-4a81-b045-6837c375eca5 service nova] Releasing lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.777353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d56633-ab65-4243-ab48-26378c978eec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.814021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f30977-cf70-4154-9fef-e3df04e1b19b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.821431] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327b1b38-7bc5-410f-9e6a-91b734e29451 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.835848] env[68244]: DEBUG nova.compute.provider_tree [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.267440] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.339116] env[68244]: DEBUG nova.scheduler.client.report [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.529422] env[68244]: INFO nova.compute.manager [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Swapping old allocation on dict_keys(['b885cb16-3bd4-46d8-abd9-28a1bf1058e3']) held by migration fdae44ec-31ae-4e7f-8b05-aba60785fd58 for instance [ 1151.558167] env[68244]: DEBUG nova.scheduler.client.report [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Overwriting current allocation {'allocations': {'b885cb16-3bd4-46d8-abd9-28a1bf1058e3': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 146}}, 'project_id': '207109eb01bd42b081cc66385789ab80', 'user_id': '813e863e39a449dd915ef45aa553cdab', 'consumer_generation': 1} on consumer df935885-c313-473d-aa3a-ba81aa999554 {{(pid=68244) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1151.662093] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.662303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.662482] env[68244]: DEBUG nova.network.neutron [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1151.772979] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.851776] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.854272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.838s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.854553] env[68244]: DEBUG nova.objects.instance [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'resources' on Instance uuid 7778c027-d4af-436c-a545-aa513c0b1127 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.879485] env[68244]: INFO nova.scheduler.client.report [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 8d0fa6cd-c14f-49ab-9595-396a10b4639a [ 1152.269809] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.392715] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ebe7d80b-e7a1-4e28-b0eb-665664935617 tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "8d0fa6cd-c14f-49ab-9595-396a10b4639a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.961s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.429795] env[68244]: DEBUG nova.network.neutron [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [{"id": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "address": "fa:16:3e:c7:cf:58", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9389f00b-7d", "ovs_interfaceid": "9389f00b-7d76-4743-9f6d-d9af08918ce6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.550862] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91672eac-22d2-422a-aaa9-8c7380dfc5b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.558459] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1569240-edda-41d4-9af1-fbade3f135d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.590218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c658c38-c4c7-457a-824e-5221fd86b58e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.597303] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3cc074-a346-4df6-b13e-35820e839845 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.614484] env[68244]: DEBUG nova.compute.provider_tree [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree 
for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.719567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.719567] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.771712] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.932686] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-df935885-c313-473d-aa3a-ba81aa999554" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.935070] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb069aa-9f75-4849-8c63-dda36fc18af8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.942813] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225ed723-d351-4032-9650-1f62d631f6f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.117308] env[68244]: DEBUG nova.scheduler.client.report [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.222055] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1153.274429] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.622925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.625244] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.537s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.627166] env[68244]: INFO nova.compute.claims [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.645694] env[68244]: INFO nova.scheduler.client.report [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted allocations for instance 7778c027-d4af-436c-a545-aa513c0b1127 [ 1153.741044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.771906] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.814081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "2aacd21f-d664-4267-8331-d3862f43d35b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.814081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.814081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.814351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.814456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.816666] env[68244]: INFO nova.compute.manager [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Terminating instance [ 1154.044885] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.044885] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf14d239-1553-4a5e-a1f1-a7734e4a41e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.053366] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1154.053366] env[68244]: value = "task-2781143" [ 1154.053366] env[68244]: _type = "Task" [ 
1154.053366] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.060883] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.152716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e578b166-af91-43fb-af75-edfa785516fa tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "7778c027-d4af-436c-a545-aa513c0b1127" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.718s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.272558] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.321182] env[68244]: DEBUG nova.compute.manager [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1154.321418] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1154.322346] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f99ac2-c8ff-469d-bf3e-cf9328ee92b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.329611] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.329849] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f22b724f-b117-42c0-8d41-f30803bc33f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.335579] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1154.335579] env[68244]: value = "task-2781144" [ 1154.335579] env[68244]: _type = "Task" [ 1154.335579] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.346285] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.563140] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781143, 'name': PowerOffVM_Task, 'duration_secs': 0.206866} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.563366] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1154.564222] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1154.564354] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1154.564419] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1154.564601] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1154.564764] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1154.564883] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1154.565128] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1154.565298] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1154.565510] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1154.565712] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1154.565864] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1154.572040] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd96fcdc-dbe3-4e56-9a40-8ffa571d578e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.587957] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1154.587957] env[68244]: value = "task-2781145" [ 1154.587957] env[68244]: _type = "Task" [ 1154.587957] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.595687] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.774065] env[68244]: DEBUG oslo_vmware.api [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781142, 'name': ReconfigVM_Task, 'duration_secs': 5.84867} completed successfully. 
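
Editor's note: the hardware.py records above walk from the flavor/image limits (0 meaning "no constraint", capped at 65536 per dimension) to the set of (sockets, cores, threads) splits whose product equals the flavor's vCPU count — one candidate, 1:1:1, for the 1-vCPU m1.nano flavor. Below is a rough illustration of that enumeration under those assumptions; the function name is hypothetical and this is not the actual nova.virt.hardware code, only the same counting idea.

from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # List every (sockets, cores, threads) split that multiplies out to the
    # vCPU count without exceeding the per-dimension maxima.
    topologies = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies


# For a 1-vCPU flavor there is exactly one candidate topology:
print(possible_topologies(1))   # [(1, 1, 1)]
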
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.776215] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.776428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Reconfigured VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1154.806162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247f6cf6-8024-40d1-aa5f-467b051fc77d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.813042] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed25e55-44fd-4abd-b39d-88c36ca3edf2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.845764] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd991b9-09b2-403b-b0cb-22157bbc225a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.856028] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7cc3cd-9d55-4401-bf6a-5edf4149dba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.859436] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781144, 'name': PowerOffVM_Task, 'duration_secs': 0.242549} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.859770] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1154.859861] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1154.860455] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb6ecd5-e057-434e-821a-e3eb225e68da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.869909] env[68244]: DEBUG nova.compute.provider_tree [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.928404] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1154.928502] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1154.928658] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleting the datastore file [datastore2] 2aacd21f-d664-4267-8331-d3862f43d35b {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1154.928908] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31f6fdab-b263-4f6d-9d87-8083a4a1d9f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.935239] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for the task: (returnval){ [ 1154.935239] env[68244]: value = "task-2781147" [ 1154.935239] env[68244]: _type = "Task" [ 1154.935239] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.943142] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781147, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.976268] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.976547] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.977027] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.977380] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.977672] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.981224] env[68244]: INFO nova.compute.manager [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Terminating instance [ 1155.078650] env[68244]: DEBUG nova.compute.manager [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-vif-deleted-e701e429-b2ba-44bd-a482-df72b133b5fc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1155.078932] env[68244]: INFO nova.compute.manager [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Neutron deleted interface e701e429-b2ba-44bd-a482-df72b133b5fc; detaching it from the instance and deleting it from the info cache [ 1155.079128] env[68244]: DEBUG nova.network.neutron [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd 
req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.100051] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781145, 'name': ReconfigVM_Task, 'duration_secs': 0.148385} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.100792] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64786650-d730-4302-8f93-be3ad306314b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.125651] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1155.125885] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.126053] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.126239] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.126384] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.126628] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1155.126883] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1155.127070] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1155.127240] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1155.127402] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1155.127586] env[68244]: DEBUG nova.virt.hardware [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1155.128371] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efbd13a5-e49a-40f2-b0bb-325cf3c1fdd1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.133638] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1155.133638] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bb5cdd-bd19-48ad-0e4a-5cef5db2c5bd" [ 1155.133638] env[68244]: _type = "Task" [ 1155.133638] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.141334] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bb5cdd-bd19-48ad-0e4a-5cef5db2c5bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.373072] env[68244]: DEBUG nova.scheduler.client.report [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.449091] env[68244]: DEBUG oslo_vmware.api [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Task: {'id': task-2781147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41512} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.449753] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1155.449951] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1155.450147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1155.450318] env[68244]: INFO nova.compute.manager [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1155.450581] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1155.450779] env[68244]: DEBUG nova.compute.manager [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1155.451128] env[68244]: DEBUG nova.network.neutron [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1155.485223] env[68244]: DEBUG nova.compute.manager [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1155.486257] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.487841] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb76caf0-2b4c-465c-a094-e404950e3c81 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.495118] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.495368] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85c64c7b-1b7d-4f65-a9b2-ea788a50c542 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.501070] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1155.501070] env[68244]: value = "task-2781148" [ 1155.501070] env[68244]: _type = "Task" [ 1155.501070] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.508460] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781148, 'name': PowerOffVM_Task} progress is 0%. 
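
Editor's note: every VirtualMachine.*_Task invocation in this log is followed by the same wait loop — the API session polls the returned task handle at a fixed interval, logging "progress is N%" until the task reports success or error. The sketch below is a simplified, self-contained stand-in for that polling pattern; the Task class is a dummy, not the oslo.vmware session API.

import time


class Task:
    # Hypothetical stand-in for a vCenter task handle.
    def __init__(self, task_id, name):
        self.id, self.name, self.progress = task_id, name, 0

    def refresh(self):
        # Pretend each poll observes more progress on the server side.
        self.progress = min(100, self.progress + 50)
        return "success" if self.progress == 100 else "running"


def wait_for_task(task, poll_interval=0.5):
    while True:
        state = task.refresh()
        print(f"Task: {{'id': {task.id}, 'name': {task.name}}} progress is {task.progress}%.")
        if state == "success":
            print(f"Task {task.id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.id} failed")
        time.sleep(poll_interval)


wait_for_task(Task("task-2781148", "PowerOffVM_Task"))
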
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.582161] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.582161] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] Acquired lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.583496] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f205696-6388-4d5b-94a1-ac197449d3ad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.604330] env[68244]: DEBUG oslo_concurrency.lockutils [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] Releasing lock "788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.604599] env[68244]: WARNING nova.compute.manager [req-a7ea96c9-d1c3-40c5-bf60-59455be5b5fd req-fe90f402-f76c-45b9-a799-75f31dca5abf service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Detach interface failed, port_id=e701e429-b2ba-44bd-a482-df72b133b5fc, reason: No device with interface-id e701e429-b2ba-44bd-a482-df72b133b5fc exists on VM: nova.exception.NotFound: No device with interface-id e701e429-b2ba-44bd-a482-df72b133b5fc exists on VM [ 1155.643704] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bb5cdd-bd19-48ad-0e4a-5cef5db2c5bd, 'name': SearchDatastore_Task, 'duration_secs': 0.01935} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.648983] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1155.649288] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7751cdd-e527-4d16-900e-4fd4992accca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.667374] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1155.667374] env[68244]: value = "task-2781149" [ 1155.667374] env[68244]: _type = "Task" [ 1155.667374] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.676051] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.878249] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.878787] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1155.881616] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.491s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.883508] env[68244]: INFO nova.compute.claims [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.943995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.944276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.944484] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.944671] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 
tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.944851] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.946797] env[68244]: INFO nova.compute.manager [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Terminating instance [ 1156.010565] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781148, 'name': PowerOffVM_Task, 'duration_secs': 0.294358} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.010851] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.011032] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.011278] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4495a1e5-7883-428b-bb66-8e3a573d2007 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.064036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.064256] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.064437] env[68244]: DEBUG nova.network.neutron [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Building network info cache for 
instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1156.068939] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.069185] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.069383] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleting the datastore file [datastore2] e2099d6d-5ab7-4a3e-8034-a3b4fc422749 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.069643] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60612923-6de9-4c13-bae2-6cd8a923cf72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.075532] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for the task: (returnval){ [ 1156.075532] env[68244]: value = "task-2781151" [ 1156.075532] env[68244]: _type = "Task" [ 1156.075532] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.083716] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.176771] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781149, 'name': ReconfigVM_Task, 'duration_secs': 0.219005} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.178021] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1156.178021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61d945a-d48b-4e44-a084-8cf25fc2549e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.195108] env[68244]: DEBUG nova.network.neutron [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.203444] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.203985] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b57dc3d-2f44-4179-bcb7-66813d1b7774 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.223635] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1156.223635] env[68244]: value = "task-2781152" [ 1156.223635] env[68244]: _type = "Task" [ 1156.223635] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.236688] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781152, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.388171] env[68244]: DEBUG nova.compute.utils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.391774] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1156.391945] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1156.439513] env[68244]: DEBUG nova.policy [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3151a146805a456da750a47964f86f2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a151f53070d94d08bf7e85617a6f5190', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1156.450819] env[68244]: DEBUG nova.compute.manager [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.451039] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.451907] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe4d65a-f313-4a0c-97f7-05a6d0f34e09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.458919] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.459168] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fcddde3-9aa9-4889-9945-ad1fe5122711 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.465038] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1156.465038] env[68244]: value = "task-2781153" [ 1156.465038] env[68244]: _type = "Task" [ 1156.465038] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.472633] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.588757] env[68244]: DEBUG oslo_vmware.api [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Task: {'id': task-2781151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156936} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.589080] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.589346] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.589645] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.589936] env[68244]: INFO nova.compute.manager [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1156.590322] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.590644] env[68244]: DEBUG nova.compute.manager [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.590795] env[68244]: DEBUG nova.network.neutron [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.706570] env[68244]: INFO nova.compute.manager [-] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Took 1.26 seconds to deallocate network for instance. 
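
Editor's note: taken together, the records above trace the same teardown order for each instance in this run — power off the VM, unregister it from vCenter, delete its directory on the datastore, then deallocate its Neutron ports. The condensed sketch below only mirrors that ordering; the classes are placeholders that print the steps and are not the nova.virt.vmwareapi driver objects or their call signatures.

class FakeVM:
    def __init__(self, uuid, datastore):
        self.uuid, self.datastore = uuid, datastore

    def power_off(self):
        print(f"[instance: {self.uuid}] Powered off the VM")       # PowerOffVM_Task

    def unregister(self):
        print(f"[instance: {self.uuid}] Unregistered the VM")      # UnregisterVM

    def delete_files(self):
        print(f"Deleting the datastore file [{self.datastore}] {self.uuid}")  # DeleteDatastoreFile_Task


class FakeNetworkAPI:
    def deallocate_for_instance(self, uuid):
        print(f"[instance: {uuid}] deallocate_for_instance()")


def destroy(vm, network_api):
    # Order matters: the VM must be powered off before it can be unregistered,
    # its files are removed once it is out of the vCenter inventory, and the
    # network is released last, after the hypervisor-side destroy completes.
    vm.power_off()
    vm.unregister()
    vm.delete_files()
    network_api.deallocate_for_instance(vm.uuid)


destroy(FakeVM("2aacd21f-d664-4267-8331-d3862f43d35b", "datastore2"), FakeNetworkAPI())
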
[ 1156.734330] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781152, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.892382] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1156.906060] env[68244]: DEBUG nova.compute.manager [req-06808373-0ec5-4515-b1cf-77d005e8e75f req-ade44510-4280-4273-b645-3a366dc6280e service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Received event network-vif-deleted-4fb55cce-a3f8-40f2-92e8-9f7166bcbf26 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1156.906274] env[68244]: INFO nova.compute.manager [req-06808373-0ec5-4515-b1cf-77d005e8e75f req-ade44510-4280-4273-b645-3a366dc6280e service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Neutron deleted interface 4fb55cce-a3f8-40f2-92e8-9f7166bcbf26; detaching it from the instance and deleting it from the info cache [ 1156.906444] env[68244]: DEBUG nova.network.neutron [req-06808373-0ec5-4515-b1cf-77d005e8e75f req-ade44510-4280-4273-b645-3a366dc6280e service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.979505] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781153, 'name': PowerOffVM_Task, 'duration_secs': 0.237508} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.981969] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.982179] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.982609] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e034d506-e3b6-43dc-bee8-657de977031d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.039358] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Successfully created port: 5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1157.045913] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.046147] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.046326] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleting the datastore file [datastore2] 788e77e1-a356-4342-9ff3-5ad13868fd77 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.046576] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94e07c4d-a643-4ba4-944b-f3a2c681e590 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.052649] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1157.052649] env[68244]: value = "task-2781155" [ 1157.052649] env[68244]: _type = "Task" [ 1157.052649] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.061356] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781155, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.105490] env[68244]: DEBUG nova.compute.manager [req-0457ed92-16a4-48e7-8507-974eeca5404a req-e9194cfa-b994-49bb-b918-ffd31bc9d19f service nova] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Received event network-vif-deleted-d6df42d7-2b90-4e9a-a9cc-15adae4310a1 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1157.124306] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e8749f-f8e0-4625-aec2-0eb3b0739274 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.132104] env[68244]: DEBUG nova.network.neutron [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [{"id": "e2daf964-3dca-4df6-b310-952aab3796a9", "address": "fa:16:3e:7e:5f:a7", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2daf964-3d", "ovs_interfaceid": "e2daf964-3dca-4df6-b310-952aab3796a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.136506] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b4ced4-c0e1-4b3e-a8b8-f3f4c48eb164 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.166868] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197375ab-8c3a-426e-a7cd-e3ae18b1a5c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.174980] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0e06da-c593-4d53-a483-250152fb5efd {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.188580] env[68244]: DEBUG nova.compute.provider_tree [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.214382] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.235173] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781152, 'name': ReconfigVM_Task, 'duration_secs': 0.882702} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.235498] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to attach disk [datastore2] df935885-c313-473d-aa3a-ba81aa999554/df935885-c313-473d-aa3a-ba81aa999554.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.236477] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66cf967-7d23-4410-9065-40e7183562ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.258707] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9c395e-81b1-49b5-bef2-5075d5b81c04 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.280441] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b3f340-adea-4684-9f5c-b6a973683d02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.302475] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f060310-b920-47e8-ab95-1ae20e129f09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.308881] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.309169] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95f83ae7-fb51-4ded-8c60-096764c09431 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.315385] env[68244]: DEBUG oslo_vmware.api [None 
req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1157.315385] env[68244]: value = "task-2781156" [ 1157.315385] env[68244]: _type = "Task" [ 1157.315385] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.322344] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781156, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.385928] env[68244]: DEBUG nova.network.neutron [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.409383] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e49b4a81-3033-4e67-a1e2-a150b6f3c194 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.418857] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3346297-6b53-49ff-b29c-cd89a05b49bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.447068] env[68244]: DEBUG nova.compute.manager [req-06808373-0ec5-4515-b1cf-77d005e8e75f req-ade44510-4280-4273-b645-3a366dc6280e service nova] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Detach interface failed, port_id=4fb55cce-a3f8-40f2-92e8-9f7166bcbf26, reason: Instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1157.562667] env[68244]: DEBUG oslo_vmware.api [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781155, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202336} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.562930] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.563124] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.563301] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.563475] env[68244]: INFO nova.compute.manager [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1157.563722] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.563920] env[68244]: DEBUG nova.compute.manager [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.564019] env[68244]: DEBUG nova.network.neutron [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1157.638560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-788e77e1-a356-4342-9ff3-5ad13868fd77" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.692074] env[68244]: DEBUG nova.scheduler.client.report [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.827028] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781156, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.889885] env[68244]: INFO nova.compute.manager [-] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Took 1.30 seconds to deallocate network for instance. [ 1157.904212] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1157.935496] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1157.937239] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1157.937549] env[68244]: DEBUG 
nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1157.937751] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1157.937984] env[68244]: DEBUG nova.virt.hardware [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1157.938952] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9562a5-2829-4733-9190-dc0e5d2ef5c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.948100] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570ae33c-d501-455a-83da-180c91c66d42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.142606] env[68244]: DEBUG oslo_concurrency.lockutils [None req-39ec03bc-fdbf-4eaf-9e76-21592490d0f7 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-788e77e1-a356-4342-9ff3-5ad13868fd77-e701e429-b2ba-44bd-a482-df72b133b5fc" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.978s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.196586] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.197117] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1158.199795] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.929s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.200029] env[68244]: DEBUG nova.objects.instance [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lazy-loading 'resources' on Instance uuid ae8211ae-82bb-4a69-aa27-e81de2a06abe {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.325740] env[68244]: DEBUG oslo_vmware.api [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781156, 'name': PowerOnVM_Task, 'duration_secs': 0.626348} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.326282] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.399613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.479359] env[68244]: DEBUG nova.network.neutron [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.542768] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Successfully updated port: 5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1158.703188] env[68244]: DEBUG nova.compute.utils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1158.707619] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1158.707991] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1158.745853] env[68244]: DEBUG nova.policy [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dd4fe2dbf154c1791b0bf2e9744629a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a16375181ca41fead00ee23bd2a9af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.914251] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61632fa-dc05-44ef-a815-83b881ea7317 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.922254] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b4d633-41b5-4692-b157-eda1262142ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.952603] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc020b5b-2e2e-4317-a756-c150f2de70b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.960034] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f27cc9-ccf9-4d5f-8cd5-ee6c7ed7c079 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.981373] env[68244]: DEBUG nova.compute.provider_tree [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.981743] env[68244]: INFO nova.compute.manager [-] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Took 1.42 seconds to deallocate network for instance. 
[ 1159.043715] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Successfully created port: 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1159.045986] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.046138] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.046288] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1159.135897] env[68244]: DEBUG nova.compute.manager [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Received event network-vif-deleted-e2daf964-3dca-4df6-b310-952aab3796a9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1159.136198] env[68244]: DEBUG nova.compute.manager [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Received event network-vif-plugged-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1159.136712] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.136959] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.137146] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.137312] env[68244]: DEBUG nova.compute.manager 
[req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] No waiting events found dispatching network-vif-plugged-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1159.137478] env[68244]: WARNING nova.compute.manager [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Received unexpected event network-vif-plugged-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 for instance with vm_state building and task_state spawning. [ 1159.137637] env[68244]: DEBUG nova.compute.manager [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Received event network-changed-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1159.137790] env[68244]: DEBUG nova.compute.manager [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Refreshing instance network info cache due to event network-changed-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1159.137972] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Acquiring lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.210435] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1159.374839] env[68244]: INFO nova.compute.manager [None req-b4850b7f-1678-41f8-ab38-88db7454b4ae tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance to original state: 'active' [ 1159.481778] env[68244]: DEBUG nova.scheduler.client.report [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1159.491248] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.577181] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1159.706483] env[68244]: DEBUG nova.network.neutron [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Updating instance_info_cache with network_info: [{"id": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "address": "fa:16:3e:d2:be:ec", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615ea2e-6d", "ovs_interfaceid": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.986137] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.988560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.248s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.990118] env[68244]: INFO nova.compute.claims [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1160.010894] env[68244]: INFO nova.scheduler.client.report [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Deleted allocations for instance ae8211ae-82bb-4a69-aa27-e81de2a06abe [ 1160.210097] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.210097] env[68244]: DEBUG nova.compute.manager [None 
req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance network_info: |[{"id": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "address": "fa:16:3e:d2:be:ec", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615ea2e-6d", "ovs_interfaceid": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1160.210097] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Acquired lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.210490] env[68244]: DEBUG nova.network.neutron [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Refreshing network info cache for port 5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1160.211628] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:be:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5615ea2e-6d28-4e93-8c36-fce6a7bd1b07', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1160.219592] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.221642] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1160.223701] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1160.224469] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1846e778-364a-4c5b-b51d-3d1efb82b91b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.252151] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1160.252624] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.252624] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1160.252908] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.253089] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1160.253278] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1160.253594] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1160.253835] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1160.254063] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1160.254351] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1160.254490] env[68244]: DEBUG nova.virt.hardware [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1160.255849] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845dc3df-9fe1-496f-a166-f5696c4c9368 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.260353] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1160.260353] env[68244]: value = "task-2781157" [ 1160.260353] env[68244]: _type = "Task" [ 1160.260353] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.268148] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26b1b36-f352-4b37-8cf1-2d933372e0ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.277460] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781157, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.490645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.490645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.519603] env[68244]: DEBUG oslo_concurrency.lockutils [None req-03038f3e-6368-4771-b6ad-62e22acb2a92 tempest-ServerGroupTestJSON-15191867 tempest-ServerGroupTestJSON-15191867-project-member] Lock "ae8211ae-82bb-4a69-aa27-e81de2a06abe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.796s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.567264] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Successfully updated port: 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.771214] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781157, 'name': CreateVM_Task, 'duration_secs': 0.337456} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.771442] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.772185] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.772400] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.772709] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1160.772995] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d211dda-87c1-45e9-9d57-2f236fd82a23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.777815] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1160.777815] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a27c43-21d3-c8fe-852d-d2c4f1d09ec4" [ 1160.777815] env[68244]: _type = "Task" [ 1160.777815] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.785587] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a27c43-21d3-c8fe-852d-d2c4f1d09ec4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.879031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.879351] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.879636] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.880064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.880064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.884230] env[68244]: INFO nova.compute.manager [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Terminating instance [ 1160.916588] env[68244]: DEBUG nova.network.neutron [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Updated VIF entry in instance network info cache for port 5615ea2e-6d28-4e93-8c36-fce6a7bd1b07. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.916933] env[68244]: DEBUG nova.network.neutron [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Updating instance_info_cache with network_info: [{"id": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "address": "fa:16:3e:d2:be:ec", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5615ea2e-6d", "ovs_interfaceid": "5615ea2e-6d28-4e93-8c36-fce6a7bd1b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.993760] env[68244]: INFO nova.compute.manager [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Detaching volume c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb [ 1161.029125] env[68244]: INFO nova.virt.block_device [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Attempting to driver detach volume c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb from mountpoint /dev/sdb [ 1161.029125] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1161.029125] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559123', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'name': 'volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd46f6695-7a96-4e0b-b43a-236bcb4ec519', 'attached_at': '', 'detached_at': '', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'serial': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1161.029780] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a269cd-c768-4c54-9c97-0b4bb1f1fe76 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.054888] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fe9bb6-5303-4960-8cb0-d3c22779e056 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.062816] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d68e6f8-852e-4aa2-8b20-c4cd5772a0d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.083428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.083589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.083734] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.088879] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb16c54a-7ad8-4fa7-9ab6-221ed5504dce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.105433] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb/volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1161.110823] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1161.113788] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dfedcd5-0c57-42f2-a784-1cab4af397e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.132782] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1161.132782] env[68244]: value = "task-2781158" [ 1161.132782] env[68244]: _type = "Task" [ 1161.132782] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.144865] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781158, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.160873] env[68244]: DEBUG nova.compute.manager [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Received event network-vif-plugged-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.161111] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Acquiring lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.161363] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.161511] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.161575] env[68244]: DEBUG nova.compute.manager [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] No waiting events found 
dispatching network-vif-plugged-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.161751] env[68244]: WARNING nova.compute.manager [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Received unexpected event network-vif-plugged-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 for instance with vm_state building and task_state spawning. [ 1161.161961] env[68244]: DEBUG nova.compute.manager [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Received event network-changed-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.162087] env[68244]: DEBUG nova.compute.manager [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Refreshing instance network info cache due to event network-changed-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1161.162272] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Acquiring lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.230220] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c280a619-1dad-481f-add5-62f5b33863aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.238838] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e076c5d9-e324-4872-ab8c-229a8a2314ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.271646] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfdaaca-112f-4d37-8f16-c1f4b9ffba9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.282826] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5fb187-f827-47a9-8615-47f43bfbbd37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.291938] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a27c43-21d3-c8fe-852d-d2c4f1d09ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.010021} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.300042] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.300306] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.300561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.300732] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.300918] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.301795] env[68244]: DEBUG nova.compute.provider_tree [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.304239] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06e9ba87-5f5f-4eb7-b219-67b9a7293833 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.318133] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.318330] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.319433] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2257d919-cb27-47c8-9f1f-94d6ec9591ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.325278] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1161.325278] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eef96e-3a2f-c34d-6acd-fe4e76570a16" [ 1161.325278] env[68244]: _type = "Task" [ 1161.325278] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.333031] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eef96e-3a2f-c34d-6acd-fe4e76570a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.355144] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.355409] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.355642] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "df935885-c313-473d-aa3a-ba81aa999554-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.355803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.355977] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.358247] env[68244]: INFO nova.compute.manager [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Terminating instance [ 1161.388407] env[68244]: DEBUG nova.compute.manager [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.388736] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.389638] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48af933-beb6-4f56-866f-a1e479943fa9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.397124] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.397364] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b4eeb31-7cda-4cd5-8a51-2946f97d1b16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.403779] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1161.403779] env[68244]: value = "task-2781159" [ 1161.403779] env[68244]: _type = "Task" [ 1161.403779] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.412255] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.422164] env[68244]: DEBUG oslo_concurrency.lockutils [req-93603b63-7643-4acf-b134-944aeafc7d8b req-bc94f8c6-3ac6-4a5c-b228-e3c3e5245736 service nova] Releasing lock "refresh_cache-b036365a-87d7-44ea-b439-80f1fe0c5f61" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.623537] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1161.641892] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781158, 'name': ReconfigVM_Task, 'duration_secs': 0.360153} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.644333] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1161.649399] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5f7f3d3-6ef1-437d-9a5a-70878d558855 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.664311] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1161.664311] env[68244]: value = "task-2781160" [ 1161.664311] env[68244]: _type = "Task" [ 1161.664311] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.676294] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781160, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.788904] env[68244]: DEBUG nova.network.neutron [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updating instance_info_cache with network_info: [{"id": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "address": "fa:16:3e:e0:e4:c8", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f6b8a1d-f6", "ovs_interfaceid": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.805208] env[68244]: DEBUG nova.scheduler.client.report [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.836308] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52eef96e-3a2f-c34d-6acd-fe4e76570a16, 'name': SearchDatastore_Task, 'duration_secs': 0.01483} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.837178] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36c6b4ff-e77d-4057-9264-385178a6db6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.842972] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1161.842972] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ca070-a5c1-5301-bf99-3a185304f95e" [ 1161.842972] env[68244]: _type = "Task" [ 1161.842972] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.850885] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ca070-a5c1-5301-bf99-3a185304f95e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.862301] env[68244]: DEBUG nova.compute.manager [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.862544] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.862825] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a373221-920b-424e-ac36-b1b94bb571d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.869583] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1161.869583] env[68244]: value = "task-2781161" [ 1161.869583] env[68244]: _type = "Task" [ 1161.869583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.877847] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.914735] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781159, 'name': PowerOffVM_Task, 'duration_secs': 0.229373} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.914912] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1161.915088] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.915410] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef54235f-d234-488a-944e-2a2ce7a2c5f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.980018] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1161.980464] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1161.980846] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleting the datastore file [datastore2] 10e67250-5ddc-430d-aac7-4e6bae0778e5 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1161.981171] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3359b288-d56a-4187-ab46-ad3404a214fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.988050] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1161.988050] env[68244]: value = "task-2781163" [ 1161.988050] env[68244]: _type = "Task" [ 1161.988050] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.995502] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781163, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.177022] env[68244]: DEBUG oslo_vmware.api [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781160, 'name': ReconfigVM_Task, 'duration_secs': 0.267472} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.177022] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559123', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'name': 'volume-c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd46f6695-7a96-4e0b-b43a-236bcb4ec519', 'attached_at': '', 'detached_at': '', 'volume_id': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb', 'serial': 'c83fd2fe-a8c0-45ca-b4a5-e95c88deeabb'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1162.297891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.297891] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance network_info: |[{"id": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "address": "fa:16:3e:e0:e4:c8", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f6b8a1d-f6", "ovs_interfaceid": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1162.297891] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Acquired lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.297891] env[68244]: DEBUG nova.network.neutron [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Refreshing network info cache for port 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.297891] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:e4:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.306343] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.307442] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.307560] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8edc49c1-7661-4cb9-aac6-c758f7a6713f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.323391] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.323868] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1162.330020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.112s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.330020] env[68244]: DEBUG nova.objects.instance [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lazy-loading 'resources' on Instance uuid 2aacd21f-d664-4267-8331-d3862f43d35b {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.333897] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.333897] env[68244]: value = "task-2781164" [ 1162.333897] env[68244]: _type = "Task" [ 1162.333897] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.345188] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781164, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.356020] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521ca070-a5c1-5301-bf99-3a185304f95e, 'name': SearchDatastore_Task, 'duration_secs': 0.010552} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.356020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.356020] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1162.356020] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8265444-42cc-42a2-8200-447940dac478 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.363612] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1162.363612] env[68244]: value = "task-2781165" [ 1162.363612] env[68244]: _type = "Task" [ 1162.363612] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.372539] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.380645] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781161, 'name': PowerOffVM_Task, 'duration_secs': 0.197689} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.381589] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.381792] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1162.381985] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559136', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'name': 'volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'df935885-c313-473d-aa3a-ba81aa999554', 'attached_at': '2025-03-06T03:29:21.000000', 'detached_at': '', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'serial': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1162.382785] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae4fa07-da14-4038-a7c8-635bc53cb3ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.410950] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4235e78-1fe3-4eeb-82f9-66d19c6acfec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.418584] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac16e74-24e0-416e-93d6-8d01080c9298 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.441552] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bb4be2-027e-4685-be8e-2f64d302ce38 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.457434] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] The volume has not been displaced from its original location: [datastore2] volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a/volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1162.462876] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.463899] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669603f8-1fba-454d-99ab-534a79c3049f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.483861] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1162.483861] env[68244]: value = "task-2781166" [ 1162.483861] env[68244]: _type = "Task" [ 1162.483861] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.491935] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.499674] env[68244]: DEBUG oslo_vmware.api [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221927} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.499952] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.500158] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.500348] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.500563] env[68244]: INFO nova.compute.manager [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1162.500782] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.500977] env[68244]: DEBUG nova.compute.manager [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1162.501085] env[68244]: DEBUG nova.network.neutron [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1162.734619] env[68244]: DEBUG nova.objects.instance [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'flavor' on Instance uuid d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.835829] env[68244]: DEBUG nova.compute.utils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1162.837659] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1162.837849] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1162.851514] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781164, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.885106] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781165, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.945110] env[68244]: DEBUG nova.policy [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2fbed52207f4475f82206e3acd952bc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d5a65317c2844988b2bed143e0529ff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1162.998171] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781166, 'name': ReconfigVM_Task, 'duration_secs': 0.512419} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.998675] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1163.006351] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3186700d-9e0d-46ea-a34f-f1f86f83a390 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.029748] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1163.029748] env[68244]: value = "task-2781167" [ 1163.029748] env[68244]: _type = "Task" [ 1163.029748] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.043052] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781167, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.093766] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c31a3cd-e710-47fb-9f2d-f487b46cffc4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.101069] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbaf0d3-5506-49c2-8a22-74f53bcd388f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.136417] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29519d3a-a198-4f09-8777-272142f6534c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.145688] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299e1180-f16a-4f28-be5f-8cbc47bb1c8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.151251] env[68244]: DEBUG nova.network.neutron [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updated VIF entry in instance network info cache for port 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.151711] env[68244]: DEBUG nova.network.neutron [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updating instance_info_cache with network_info: [{"id": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "address": "fa:16:3e:e0:e4:c8", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f6b8a1d-f6", "ovs_interfaceid": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.165762] env[68244]: DEBUG nova.compute.provider_tree [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1163.300947] env[68244]: DEBUG nova.compute.manager [req-896131fa-06cf-48df-bfa6-794f6763d1f2 req-2794a0c3-9b94-4078-935e-1359a029c689 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Received event network-vif-deleted-9d57c368-4817-44e7-a55f-02a83f75dabc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1163.301224] env[68244]: INFO nova.compute.manager [req-896131fa-06cf-48df-bfa6-794f6763d1f2 req-2794a0c3-9b94-4078-935e-1359a029c689 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Neutron deleted interface 9d57c368-4817-44e7-a55f-02a83f75dabc; detaching it from the instance and deleting it from the info cache [ 1163.301353] env[68244]: DEBUG nova.network.neutron [req-896131fa-06cf-48df-bfa6-794f6763d1f2 req-2794a0c3-9b94-4078-935e-1359a029c689 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.344250] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 
tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1163.357338] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781164, 'name': CreateVM_Task, 'duration_secs': 0.584984} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.357651] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.358604] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.358901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.359339] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.359654] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3890f1ca-e450-4cc4-884c-a72caefd53f7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.365016] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1163.365016] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523404d0-02ef-76ee-4299-58137ea71a63" [ 1163.365016] env[68244]: _type = "Task" [ 1163.365016] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.381404] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561106} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.384198] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1163.384448] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1163.384733] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523404d0-02ef-76ee-4299-58137ea71a63, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.384979] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06c60d28-086c-48d0-8cc8-2d0a678c0533 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.387095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.387219] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.387893] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.387893] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.387893] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 
tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.388200] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74cde1a8-82b6-4d6c-8872-6b1682c2cde7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.395783] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1163.395783] env[68244]: value = "task-2781168" [ 1163.395783] env[68244]: _type = "Task" [ 1163.395783] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.396921] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.397164] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1163.401409] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70797f3d-c771-4245-ab40-f8569cb4d56d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.409325] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.410755] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1163.410755] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523a3e5d-bb33-6967-f05b-8714997a4935" [ 1163.410755] env[68244]: _type = "Task" [ 1163.410755] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.420650] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523a3e5d-bb33-6967-f05b-8714997a4935, 'name': SearchDatastore_Task, 'duration_secs': 0.010197} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.421472] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af35c9ea-110f-4659-a944-b9ca95c98e6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.427996] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1163.427996] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5229575d-c9a5-36b6-0c4c-688b0a70a5c9" [ 1163.427996] env[68244]: _type = "Task" [ 1163.427996] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.436661] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5229575d-c9a5-36b6-0c4c-688b0a70a5c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.443603] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Successfully created port: ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1163.541159] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781167, 'name': ReconfigVM_Task, 'duration_secs': 0.150209} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.541472] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559136', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'name': 'volume-4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'df935885-c313-473d-aa3a-ba81aa999554', 'attached_at': '2025-03-06T03:29:21.000000', 'detached_at': '', 'volume_id': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a', 'serial': '4b2ae9b1-b0ee-4218-8c94-6e3f817e161a'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1163.541835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.542653] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1dcc6-cc62-4cc7-b021-7d4823e84567 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.551400] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.551799] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5236e3f-d1aa-4820-96d6-d6b33008cdda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.616267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.616499] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.616686] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleting the datastore file [datastore2] df935885-c313-473d-aa3a-ba81aa999554 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.616956] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b857f5c2-5048-4ed4-93ec-dc81cbb051c4 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.627512] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1163.627512] env[68244]: value = "task-2781170" [ 1163.627512] env[68244]: _type = "Task" [ 1163.627512] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.638165] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781170, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.642022] env[68244]: DEBUG nova.network.neutron [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.654048] env[68244]: DEBUG oslo_concurrency.lockutils [req-5c5c8472-3490-4c07-810f-79f7bb4af336 req-9b73137e-5fb4-4920-aae9-d166cb061b07 service nova] Releasing lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.689189] env[68244]: ERROR nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] [req-24769b16-65e8-4523-b24c-3c7c6bc5e097] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-24769b16-65e8-4523-b24c-3c7c6bc5e097"}]} [ 1163.705942] env[68244]: DEBUG nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1163.721908] env[68244]: DEBUG nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1163.722145] env[68244]: DEBUG nova.compute.provider_tree [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1163.733881] env[68244]: DEBUG nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1163.746580] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42c9673b-0d20-424c-a90d-d1a35a386ff3 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.256s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.752866] env[68244]: DEBUG nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1163.804721] env[68244]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a198667f-ebc5-460d-a2ba-a3e6636b7924 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.814932] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c87e42b-60d1-4509-a715-0bf28580872c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.846847] env[68244]: DEBUG nova.compute.manager [req-896131fa-06cf-48df-bfa6-794f6763d1f2 req-2794a0c3-9b94-4078-935e-1359a029c689 service nova] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Detach interface failed, port_id=9d57c368-4817-44e7-a55f-02a83f75dabc, reason: Instance 10e67250-5ddc-430d-aac7-4e6bae0778e5 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1163.909679] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074433} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.913070] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.914915] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fafed160-6f34-4a29-9228-bae3ae0a328f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.943080] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.948083] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01c9e333-c6ec-48dc-b7ba-098fc8e0d610 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.970038] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5229575d-c9a5-36b6-0c4c-688b0a70a5c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009028} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.970994] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.971270] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1163.971606] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1163.971606] env[68244]: value = "task-2781171" [ 1163.971606] env[68244]: _type = "Task" [ 1163.971606] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.971868] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d091bbed-13f9-4d2d-aa30-b9660c8195f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.984948] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781171, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.986260] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1163.986260] env[68244]: value = "task-2781172" [ 1163.986260] env[68244]: _type = "Task" [ 1163.986260] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.997653] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781172, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.005230] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23015ba1-d4bb-413d-bdd5-cb1f6661d45e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.012980] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3f13a0-3aac-4223-af0f-f555a2599cfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.042995] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9f64fb-9c0e-45f4-a880-d1ee03169019 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.051056] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f554a81-4084-4f63-94a9-e6569ef33ad6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.067393] env[68244]: DEBUG nova.compute.provider_tree [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1164.137705] env[68244]: DEBUG oslo_vmware.api [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297676} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.137963] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.138169] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.138383] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.138835] env[68244]: INFO nova.compute.manager [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1164.138835] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.139122] env[68244]: DEBUG nova.compute.manager [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1164.139122] env[68244]: DEBUG nova.network.neutron [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1164.143507] env[68244]: INFO nova.compute.manager [-] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Took 1.64 seconds to deallocate network for instance. [ 1164.359503] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1164.392924] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1164.393236] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1164.393392] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1164.393574] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1164.393726] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1164.393873] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1164.394103] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1164.394578] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1164.394578] 
env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1164.394578] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1164.394737] env[68244]: DEBUG nova.virt.hardware [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1164.395668] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97cb3d1-c528-4bc6-bf14-f620af920646 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.404162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baef027-5ec8-43a8-bfdb-14b7873b5da7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.484612] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.496531] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781172, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.609360] env[68244]: DEBUG nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 148 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1164.609650] env[68244]: DEBUG nova.compute.provider_tree [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 148 to 149 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1164.609832] env[68244]: DEBUG nova.compute.provider_tree [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1164.651413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.903780] env[68244]: DEBUG nova.compute.manager [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Received event network-vif-plugged-ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1164.904021] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] Acquiring lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.904235] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.904409] env[68244]: DEBUG oslo_concurrency.lockutils [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.904564] env[68244]: DEBUG nova.compute.manager [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] No waiting events found dispatching network-vif-plugged-ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1164.904730] env[68244]: WARNING nova.compute.manager [req-e8e6ee97-b86c-4959-bee5-491c9fe3e8c3 req-ed82e020-21d5-4ff6-bd84-395abd87e782 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Received unexpected event network-vif-plugged-ffb04675-4c65-4d62-ab42-459a01bb68b5 for instance with vm_state building and task_state spawning. [ 1164.924255] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.924491] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.924693] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.924912] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.925113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.927760] env[68244]: INFO nova.compute.manager [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Terminating instance [ 1164.986680] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Successfully updated port: ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1164.997138] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781171, 'name': ReconfigVM_Task, 'duration_secs': 0.761732} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.998057] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Reconfigured VM instance instance-00000063 to attach disk [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.999081] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fc351ec-b6d4-4e19-835c-3047cdace3e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.004752] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664192} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.005384] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1165.005628] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1165.005909] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e834aa53-fed7-472a-9281-61a9e01f9374 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.011924] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1165.011924] env[68244]: value = "task-2781173" [ 1165.011924] env[68244]: _type = "Task" [ 1165.011924] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.016229] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1165.016229] env[68244]: value = "task-2781174" [ 1165.016229] env[68244]: _type = "Task" [ 1165.016229] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.022777] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781173, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.028280] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781174, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.115756] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.789s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.118274] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.719s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.119011] env[68244]: DEBUG nova.objects.instance [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lazy-loading 'resources' on Instance uuid e2099d6d-5ab7-4a3e-8034-a3b4fc422749 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.120226] env[68244]: DEBUG nova.network.neutron [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.146401] env[68244]: INFO nova.scheduler.client.report [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Deleted allocations for instance 2aacd21f-d664-4267-8331-d3862f43d35b [ 1165.333982] env[68244]: DEBUG nova.compute.manager [req-e2c6dbda-af0d-43b3-a205-307c9f68e2fc req-33c145b8-d9d8-4b59-bc7e-59f0531b3f4c service nova] [instance: df935885-c313-473d-aa3a-ba81aa999554] Received event network-vif-deleted-9389f00b-7d76-4743-9f6d-d9af08918ce6 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1165.431918] env[68244]: DEBUG nova.compute.manager [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1165.432252] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.433064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594e8489-680c-418a-be98-c0e1a982bdcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.441039] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.441168] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ede10131-efbc-41e6-bf27-0ce0ed2c8eef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.447553] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1165.447553] env[68244]: value = "task-2781175" [ 1165.447553] env[68244]: _type = "Task" [ 1165.447553] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.455372] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781175, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.494694] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.494885] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquired lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.495081] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.524316] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781173, 'name': Rename_Task, 'duration_secs': 0.150329} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.524967] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.525172] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8051d3c7-ba9d-4840-ae52-785ec9bad4e9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.529491] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083031} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.530036] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1165.530892] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595d9968-a2d5-4080-8e32-048bf99f77a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.534998] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1165.534998] env[68244]: value = "task-2781176" [ 1165.534998] env[68244]: _type = "Task" [ 1165.534998] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.553956] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.556575] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9302a2b-6887-453f-906e-56f690837311 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.576628] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781176, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.578826] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1165.578826] env[68244]: value = "task-2781177" [ 1165.578826] env[68244]: _type = "Task" [ 1165.578826] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.587780] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781177, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.625062] env[68244]: INFO nova.compute.manager [-] [instance: df935885-c313-473d-aa3a-ba81aa999554] Took 1.49 seconds to deallocate network for instance. 
[ 1165.657412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-adc7786d-eab8-4cad-ab31-116e7a41a66a tempest-ServersTestJSON-1639636095 tempest-ServersTestJSON-1639636095-project-member] Lock "2aacd21f-d664-4267-8331-d3862f43d35b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.843s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.784938] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23954d1e-1d0d-41a7-b152-4d7bb9227d40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.794471] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510d84f3-9050-428c-a3d9-03b09720dc5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.824814] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe88650-ae07-4e1f-b3a2-94ae9b8a4e97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.832109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f2cea9-fe04-4878-835e-fce8a03ec055 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.845512] env[68244]: DEBUG nova.compute.provider_tree [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.956908] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781175, 'name': PowerOffVM_Task, 'duration_secs': 0.245285} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.957238] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.957392] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1165.957665] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-402dfa5e-dc54-45c4-ab67-862e4bac05e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.018067] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.018366] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.018913] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.019029] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bc1c777-d2df-4c5d-8686-2241e207dbd9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.026523] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1166.026523] env[68244]: value = "task-2781179" [ 1166.026523] env[68244]: _type = "Task" [ 1166.026523] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.030548] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1166.037140] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.055556] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781176, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.089225] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781177, 'name': ReconfigVM_Task, 'duration_secs': 0.410268} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.089225] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.089639] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2080f9a-76d3-4389-98f1-5edcf81e7318 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.095502] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1166.095502] env[68244]: value = "task-2781180" [ 1166.095502] env[68244]: _type = "Task" [ 1166.095502] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.107312] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781180, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.172367] env[68244]: INFO nova.compute.manager [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: df935885-c313-473d-aa3a-ba81aa999554] Took 0.55 seconds to detach 1 volumes for instance. 
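The "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / ... "released" ... held N.NNNs" triplets scattered through these records are emitted by oslo.concurrency's locking helpers (lockutils.py, as the file references show), which Nova uses to serialize work such as refreshing an instance's network-info cache or updating resource-tracker usage. A minimal sketch of that usage, with hypothetical lock names and function bodies; Nova wraps this in its own synchronized helpers:

    from oslo_concurrency import lockutils

    # Decorator form: only one caller per process runs the function for a given
    # lock name at a time; time spent blocked shows up as the "waited N.NNNs"
    # figure, and the "held N.NNNs" figure is logged when the lock is released.
    @lockutils.synchronized("refresh_cache-<instance-uuid>")
    def refresh_network_cache():
        pass  # rebuild the instance's network info cache here

    # Equivalent context-manager form, for finer-grained critical sections.
    def update_usage():
        with lockutils.lock("compute_resources"):
            pass  # adjust the resource tracker's usage totals here
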
[ 1166.240078] env[68244]: DEBUG nova.network.neutron [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updating instance_info_cache with network_info: [{"id": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "address": "fa:16:3e:e6:6f:21", "network": {"id": "f3ea2693-a0b2-4d06-8d5d-cee6e9b8dfc4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1228942130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d5a65317c2844988b2bed143e0529ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb04675-4c", "ovs_interfaceid": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.348746] env[68244]: DEBUG nova.scheduler.client.report [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.537036] env[68244]: DEBUG oslo_vmware.api [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189893} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.537351] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.537490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.537703] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.537894] env[68244]: INFO nova.compute.manager [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1166.538160] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.538359] env[68244]: DEBUG nova.compute.manager [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1166.538456] env[68244]: DEBUG nova.network.neutron [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1166.553331] env[68244]: DEBUG oslo_vmware.api [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781176, 'name': PowerOnVM_Task, 'duration_secs': 0.539559} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.553612] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.553821] env[68244]: INFO nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Took 8.65 seconds to spawn the instance on the hypervisor. 
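The "Inventory has not changed for provider ... based on inventory data: {...}" records above carry the resource-provider inventory that the scheduler report client sends to Placement. As a worked example of how those numbers translate into schedulable capacity (values copied from the log record for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3; the snippet itself is illustrative only):

    # Placement treats usable capacity as (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        # VCPU: (48 - 0) * 4.0 = 192; MEMORY_MB: (196590 - 512) * 1.0 = 196078;
        # DISK_GB: (400 - 0) * 1.0 = 400.
        print(f"{resource_class}: {capacity:g} schedulable units")
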
[ 1166.554011] env[68244]: DEBUG nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.555032] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f7328e-a86a-4684-a3ca-4ca68917953e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.606278] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781180, 'name': Rename_Task, 'duration_secs': 0.165186} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.606559] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.606835] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f19da166-85e0-448d-9a10-0bfa3d7737e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.614564] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1166.614564] env[68244]: value = "task-2781181" [ 1166.614564] env[68244]: _type = "Task" [ 1166.614564] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.623981] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.678415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.743914] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Releasing lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.743914] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Instance network_info: |[{"id": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "address": "fa:16:3e:e6:6f:21", "network": {"id": "f3ea2693-a0b2-4d06-8d5d-cee6e9b8dfc4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1228942130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d5a65317c2844988b2bed143e0529ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb04675-4c", "ovs_interfaceid": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1166.743914] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:6f:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0c293d47-74c0-49d7-a474-cdb643080f6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffb04675-4c65-4d62-ab42-459a01bb68b5', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1166.751423] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Creating folder: Project (1d5a65317c2844988b2bed143e0529ff). Parent ref: group-v558876. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1166.752019] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28e9959c-0234-4dc1-ad41-f0b1e17b52b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.764626] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Created folder: Project (1d5a65317c2844988b2bed143e0529ff) in parent group-v558876. [ 1166.764864] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Creating folder: Instances. Parent ref: group-v559153. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1166.765147] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9662a296-a8f8-484a-9795-773192838495 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.775813] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Created folder: Instances in parent group-v559153. [ 1166.777033] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.777033] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1166.777033] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87a39d8c-b4cc-4b9d-a45f-134895618bb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.797659] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1166.797659] env[68244]: value = "task-2781184" [ 1166.797659] env[68244]: _type = "Task" [ 1166.797659] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.809253] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781184, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.856398] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.858951] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.368s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.859258] env[68244]: DEBUG nova.objects.instance [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'resources' on Instance uuid 788e77e1-a356-4342-9ff3-5ad13868fd77 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.888367] env[68244]: INFO nova.scheduler.client.report [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Deleted allocations for instance e2099d6d-5ab7-4a3e-8034-a3b4fc422749 [ 1166.949714] env[68244]: DEBUG nova.compute.manager [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Received event network-changed-ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1166.951706] env[68244]: DEBUG nova.compute.manager [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Refreshing instance network info cache due to event network-changed-ffb04675-4c65-4d62-ab42-459a01bb68b5. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1166.951964] env[68244]: DEBUG oslo_concurrency.lockutils [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] Acquiring lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.952144] env[68244]: DEBUG oslo_concurrency.lockutils [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] Acquired lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.952439] env[68244]: DEBUG nova.network.neutron [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Refreshing network info cache for port ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.074083] env[68244]: INFO nova.compute.manager [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Took 20.01 seconds to build instance. [ 1167.126740] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.309141] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781184, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.404251] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b795f0d2-5dfc-4736-a817-bb3433fd0320 tempest-ServerRescueNegativeTestJSON-1675782762 tempest-ServerRescueNegativeTestJSON-1675782762-project-member] Lock "e2099d6d-5ab7-4a3e-8034-a3b4fc422749" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.427s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.579173] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e3419ffe-0a72-416c-a7de-f54d1bb9d24d tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.517s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.586457] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396e427b-538e-4210-b314-67b0fbd14243 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.594586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39a5885-f43d-478b-ab59-aa06729d2eca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.633672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6277f8-291f-4598-8a22-022f189ac33b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.644397] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adaaac4-a5f2-4171-bf16-027bffbba032 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.648316] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.659391] env[68244]: DEBUG nova.compute.provider_tree [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.676546] env[68244]: DEBUG nova.network.neutron [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.715439] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.715690] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.715865] env[68244]: DEBUG nova.compute.manager [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1167.717539] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b55dbfe-72f9-49e2-ae0d-9c0d62f06def {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.725181] env[68244]: DEBUG nova.compute.manager [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1167.727094] env[68244]: DEBUG nova.objects.instance [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'flavor' on Instance uuid b036365a-87d7-44ea-b439-80f1fe0c5f61 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1167.775278] env[68244]: DEBUG nova.network.neutron [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updated VIF entry in instance network info cache for port ffb04675-4c65-4d62-ab42-459a01bb68b5. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.775715] env[68244]: DEBUG nova.network.neutron [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updating instance_info_cache with network_info: [{"id": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "address": "fa:16:3e:e6:6f:21", "network": {"id": "f3ea2693-a0b2-4d06-8d5d-cee6e9b8dfc4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1228942130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d5a65317c2844988b2bed143e0529ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb04675-4c", "ovs_interfaceid": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.809033] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781184, 'name': CreateVM_Task, 'duration_secs': 0.754014} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.809033] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1167.809757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.809922] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.810255] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1167.810515] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98e89f4-9894-4640-a82b-333f9f296df5 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.816391] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1167.816391] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e09cc2-ef86-655a-7de5-41e334ae9a08" [ 1167.816391] env[68244]: _type = "Task" [ 1167.816391] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.824616] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e09cc2-ef86-655a-7de5-41e334ae9a08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.137282] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.166486] env[68244]: DEBUG nova.scheduler.client.report [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.181021] env[68244]: INFO nova.compute.manager [-] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Took 1.64 seconds to deallocate network for instance. [ 1168.278630] env[68244]: DEBUG oslo_concurrency.lockutils [req-37429e03-a64c-4869-9ad6-4b791b3822b4 req-35f4030f-24aa-4ad4-a42a-bfe0da1f0995 service nova] Releasing lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.328963] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e09cc2-ef86-655a-7de5-41e334ae9a08, 'name': SearchDatastore_Task, 'duration_secs': 0.029558} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.328963] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.331490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.331490] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.331490] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.331490] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.331490] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83ef9cb7-5300-4bb0-b003-392b3a685d77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.341468] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.341588] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.342401] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae1d6d7e-ea6c-4890-8b41-160a22cf24ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.347660] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1168.347660] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f9477b-23a3-e03f-4fad-05643d986e12" [ 1168.347660] env[68244]: _type = "Task" [ 1168.347660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.356385] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f9477b-23a3-e03f-4fad-05643d986e12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.642517] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.671028] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.676777] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.027s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.677227] env[68244]: DEBUG nova.objects.instance [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'resources' on Instance uuid 10e67250-5ddc-430d-aac7-4e6bae0778e5 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.692008] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.703205] env[68244]: INFO nova.scheduler.client.report [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 
tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted allocations for instance 788e77e1-a356-4342-9ff3-5ad13868fd77 [ 1168.735233] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.735233] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ada84380-08b1-470d-81d3-2dde25e2a45d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.744318] env[68244]: DEBUG oslo_vmware.api [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1168.744318] env[68244]: value = "task-2781185" [ 1168.744318] env[68244]: _type = "Task" [ 1168.744318] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.752342] env[68244]: DEBUG oslo_vmware.api [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.858312] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f9477b-23a3-e03f-4fad-05643d986e12, 'name': SearchDatastore_Task, 'duration_secs': 0.012515} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.859186] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7954f73-7dbe-4642-b999-be56558a9213 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.864374] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1168.864374] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a6ffd8-9ae4-8969-7a81-75741dd084c5" [ 1168.864374] env[68244]: _type = "Task" [ 1168.864374] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.873375] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a6ffd8-9ae4-8969-7a81-75741dd084c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.985478] env[68244]: DEBUG nova.compute.manager [req-c98ef9f6-3949-4998-98f8-d7e07a6b868a req-97012e60-0161-4a0a-8de2-8663190dcb59 service nova] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Received event network-vif-deleted-9bcf5292-c53f-42bf-97f1-7f616748f9ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1169.138681] env[68244]: DEBUG oslo_vmware.api [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781181, 'name': PowerOnVM_Task, 'duration_secs': 2.399654} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.138947] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.139347] env[68244]: INFO nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Took 8.92 seconds to spawn the instance on the hypervisor. [ 1169.139477] env[68244]: DEBUG nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1169.140272] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4631918a-23af-48a1-8543-72d87bb23df5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.212412] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19a87373-70f8-48bc-b4a8-6a6a4ae170be tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "788e77e1-a356-4342-9ff3-5ad13868fd77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.268s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.261874] env[68244]: DEBUG oslo_vmware.api [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781185, 'name': PowerOffVM_Task, 'duration_secs': 0.188988} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.262829] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.262829] env[68244]: DEBUG nova.compute.manager [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1169.263463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53041bd5-2593-4c5e-9486-636e1881db6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.378051] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a6ffd8-9ae4-8969-7a81-75741dd084c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009931} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.378051] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.378193] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab/da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1169.378543] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6fd283f-7a1b-4c28-9720-13e9e15e14ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.385204] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1169.385204] env[68244]: value = "task-2781186" [ 1169.385204] env[68244]: _type = "Task" [ 1169.385204] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.397496] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.405922] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df670d05-ad9f-48da-8478-2fa9d5b3fd7f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.416884] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb301f2-bbff-4973-b425-e4a7507b9584 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.471799] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641891d7-ee06-4065-8944-a228d98dfed1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.483218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3def1d21-09fb-481e-bc92-317baf171479 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.508488] env[68244]: DEBUG nova.compute.provider_tree [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.664675] env[68244]: INFO nova.compute.manager [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Took 22.29 seconds to build instance. [ 1169.782699] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d2923a0-7e7d-4a51-9544-34317111d0f9 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.898141] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781186, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.012020] env[68244]: DEBUG nova.scheduler.client.report [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1170.167219] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fb8ca52c-f3e6-4cf2-9d35-7d196cea8491 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.807s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.400087] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648777} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.400478] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab/da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1170.400741] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1170.401041] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31e93dd0-bcfd-4af8-b112-ccc9e80549bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.408381] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1170.408381] env[68244]: value = "task-2781187" [ 1170.408381] env[68244]: _type = "Task" [ 1170.408381] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.417882] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781187, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.432874] env[68244]: INFO nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Rebuilding instance [ 1170.486743] env[68244]: DEBUG nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1170.486743] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3099de-8a31-4c67-bacd-29b4c6819831 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.518517] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.522132] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.844s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.525266] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.527196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.837s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.527347] env[68244]: DEBUG nova.objects.instance [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'resources' on Instance uuid d46f6695-7a96-4e0b-b43a-236bcb4ec519 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.550244] env[68244]: INFO nova.scheduler.client.report [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 
tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted allocations for instance 10e67250-5ddc-430d-aac7-4e6bae0778e5 [ 1170.556256] env[68244]: INFO nova.scheduler.client.report [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted allocations for instance df935885-c313-473d-aa3a-ba81aa999554 [ 1170.920883] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190878} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.925244] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1170.925244] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43669401-17aa-44cb-8650-f822bc9db5e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.955012] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab/da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.955012] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4ed178a-4715-4ef7-8197-aed485caa6ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.979722] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1170.979722] env[68244]: value = "task-2781188" [ 1170.979722] env[68244]: _type = "Task" [ 1170.979722] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.991803] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781188, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.066948] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d7d6b01c-6e66-4e85-a5d7-fefba2f63e58 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "10e67250-5ddc-430d-aac7-4e6bae0778e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.188s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.068163] env[68244]: DEBUG oslo_concurrency.lockutils [None req-512fcbc1-e705-4de6-9636-857a1f67b29e tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "df935885-c313-473d-aa3a-ba81aa999554" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.713s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.121770] env[68244]: DEBUG nova.compute.manager [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Received event network-changed-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1171.122040] env[68244]: DEBUG nova.compute.manager [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Refreshing instance network info cache due to event network-changed-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1171.122266] env[68244]: DEBUG oslo_concurrency.lockutils [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] Acquiring lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.122842] env[68244]: DEBUG oslo_concurrency.lockutils [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] Acquired lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.123685] env[68244]: DEBUG nova.network.neutron [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Refreshing network info cache for port 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.207518] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea36b629-6293-43cf-b632-9104dfcb5dcb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.215882] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0668c6d-5f45-467a-aa7c-b5e093a20587 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.252051] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec95cd6c-03b8-44cf-99ad-d7604a240319 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.264070] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ddef71-fba8-499d-8ab2-e6b9ccae522f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.288863] env[68244]: DEBUG nova.compute.provider_tree [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1171.494630] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781188, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.502669] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.503822] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ef4da56-3b80-4b2b-8589-e1e8d3274b54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.513019] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1171.513019] env[68244]: value = "task-2781189" [ 1171.513019] env[68244]: _type = "Task" [ 1171.513019] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.518632] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781189, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.836435] env[68244]: DEBUG nova.scheduler.client.report [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1171.836736] env[68244]: DEBUG nova.compute.provider_tree [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 149 to 150 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1171.837482] env[68244]: DEBUG nova.compute.provider_tree [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1171.971540] env[68244]: DEBUG nova.network.neutron [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updated VIF entry in instance network info cache for port 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.972099] env[68244]: DEBUG nova.network.neutron [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updating instance_info_cache with network_info: [{"id": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "address": "fa:16:3e:e0:e4:c8", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f6b8a1d-f6", "ovs_interfaceid": "0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.000455] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781188, 'name': ReconfigVM_Task, 'duration_secs': 0.659851} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.000743] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Reconfigured VM instance instance-00000065 to attach disk [datastore2] da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab/da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.001420] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0cda868-8346-437f-9d6d-165b1de42f19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.008770] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1172.008770] env[68244]: value = "task-2781190" [ 1172.008770] env[68244]: _type = "Task" [ 1172.008770] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.022999] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781190, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.026642] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1172.027391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.028222] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cf9709-cb71-46ac-be3d-1d3efe793bb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.037577] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1172.038118] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d756124-099f-4e90-9f70-877b39d5a7f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.040527] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.040785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.104838] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1172.105080] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 
b036365a-87d7-44ea-b439-80f1fe0c5f61] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1172.105265] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1172.105537] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9828f917-af67-4d5a-a830-c05ff15dc1c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.115401] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1172.115401] env[68244]: value = "task-2781192" [ 1172.115401] env[68244]: _type = "Task" [ 1172.115401] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.129624] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.344043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.361455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.361611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.374765] env[68244]: INFO nova.scheduler.client.report [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted allocations for instance d46f6695-7a96-4e0b-b43a-236bcb4ec519 [ 1172.474745] env[68244]: DEBUG oslo_concurrency.lockutils [req-34e7ec3c-6a07-42e9-8c79-fcbf752f5b01 req-67d22a43-3948-44e0-97cc-359a53024500 service nova] Releasing lock "refresh_cache-7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" 
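The acquire/release entries around here ("acquired ... waited 3.844s", "released ... held 1.817s") come from oslo.concurrency's lockutils, which times how long a caller waited for a named lock and how long it held it. A simplified, threading-only stand-in for that context manager, reproducing just the waited/held accounting (no fair or external file locks):

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name):
    # Lazily create one lock per name, then log wait and hold times the way
    # the lockutils messages in the log do.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired_at - wait_start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - acquired_at))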
{{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.518412] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781190, 'name': Rename_Task, 'duration_secs': 0.237112} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.518754] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1172.518952] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abc68bc2-2221-462c-b400-d9ca491edbea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.525588] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1172.525588] env[68244]: value = "task-2781193" [ 1172.525588] env[68244]: _type = "Task" [ 1172.525588] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.533316] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.544254] env[68244]: INFO nova.compute.manager [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Detaching volume 81bf204f-6899-4aff-b7a9-850f43b0444c [ 1172.590191] env[68244]: INFO nova.virt.block_device [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Attempting to driver detach volume 81bf204f-6899-4aff-b7a9-850f43b0444c from mountpoint /dev/sdb [ 1172.590528] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1172.590845] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559117', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'name': 'volume-81bf204f-6899-4aff-b7a9-850f43b0444c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ba4f3f5-726e-482f-a821-d2ee1bbd4c33', 'attached_at': '', 'detached_at': '', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'serial': '81bf204f-6899-4aff-b7a9-850f43b0444c'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1172.591885] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb367d4-9be2-41f0-bf43-15bb24be39f2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.615052] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30235ee5-289e-45d4-80c4-8164b808cca2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.628394] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7790707c-da4a-415c-87de-60350f587dea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.632023] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239956} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.632362] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.632596] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.632821] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.657161] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b639c2c-2a48-4d72-a058-d9caa54e43da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.673216] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] The volume has not been displaced from its original location: [datastore2] volume-81bf204f-6899-4aff-b7a9-850f43b0444c/volume-81bf204f-6899-4aff-b7a9-850f43b0444c.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1172.679273] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1172.680419] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf73e12f-39e9-4a33-8038-dae8df4bc37e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.700559] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1172.700559] env[68244]: value = "task-2781194" [ 1172.700559] env[68244]: _type = "Task" [ 1172.700559] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.708542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.708865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.716457] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781194, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.864148] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1172.882812] env[68244]: DEBUG oslo_concurrency.lockutils [None req-4d46d9c7-dfd0-4d30-bda3-8d6157cc5206 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "d46f6695-7a96-4e0b-b43a-236bcb4ec519" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.958s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.036485] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781193, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.214691] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1173.217364] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781194, 'name': ReconfigVM_Task, 'duration_secs': 0.24679} completed successfully. 
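Task-2781194 above is a ReconfigVM_Task that detaches disk 2001 from the instance before the VMDK volume is detached. A sketch of an equivalent device-removal reconfigure written with pyVmomi rather than the oslo.vmware bindings the driver uses; 'vm' is assumed to be a connected vim.VirtualMachine object and the match-by-backing-file approach is an illustration, not the driver's own lookup:

from pyVmomi import vim

def detach_disk(vm, backing_file_name):
    # Build a device-change spec that removes the virtual disk whose backing
    # file matches backing_file_name, then submit it as a reconfigure task.
    spec = vim.vm.ConfigSpec()
    for device in vm.config.hardware.device:
        if (isinstance(device, vim.vm.device.VirtualDisk)
                and device.backing.fileName == backing_file_name):
            change = vim.vm.device.VirtualDeviceSpec()
            change.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
            change.device = device
            spec.deviceChange = [change]
            return vm.ReconfigVM_Task(spec=spec)
    raise ValueError('no disk backed by %s' % backing_file_name)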
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.218091] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1173.223622] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4523133e-36ad-4363-9d0d-eb0c7cb2b798 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.241583] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1173.241583] env[68244]: value = "task-2781195" [ 1173.241583] env[68244]: _type = "Task" [ 1173.241583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.251972] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781195, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.386917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.387213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.389176] env[68244]: INFO nova.compute.claims [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.536672] env[68244]: DEBUG oslo_vmware.api [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781193, 'name': PowerOnVM_Task, 'duration_secs': 0.87115} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.537235] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1173.537567] env[68244]: INFO nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Took 9.18 seconds to spawn the instance on the hypervisor. [ 1173.537855] env[68244]: DEBUG nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1173.538750] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748ac986-3177-4442-b947-55c1adb3c1e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.672958] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1173.673240] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.673398] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1173.673607] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.673711] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 
tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1173.674951] env[68244]: DEBUG nova.virt.hardware [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.675611] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d096925f-c8c1-486c-a984-60a1d01926d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.684500] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3586dc8-9427-421e-abd0-500a388a2eff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.702399] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:be:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5615ea2e-6d28-4e93-8c36-fce6a7bd1b07', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.709653] env[68244]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.709882] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.710215] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4304852-176d-43e5-b234-3a807686576f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.736046] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.736254] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.741721] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.741721] env[68244]: value = "task-2781196" [ 1173.741721] env[68244]: _type = "Task" [ 1173.741721] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.751412] env[68244]: DEBUG oslo_vmware.api [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781195, 'name': ReconfigVM_Task, 'duration_secs': 0.156605} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.754694] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559117', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'name': 'volume-81bf204f-6899-4aff-b7a9-850f43b0444c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1ba4f3f5-726e-482f-a821-d2ee1bbd4c33', 'attached_at': '', 'detached_at': '', 'volume_id': '81bf204f-6899-4aff-b7a9-850f43b0444c', 'serial': '81bf204f-6899-4aff-b7a9-850f43b0444c'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1173.757154] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781196, 'name': CreateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.758608] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.059329] env[68244]: INFO nova.compute.manager [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Took 20.33 seconds to build instance. [ 1174.241363] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1174.259806] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781196, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.315021] env[68244]: DEBUG nova.objects.instance [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.562318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cd3722d-309e-4484-a3db-3087136dc706 tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.844s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.572321] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df73280a-5d79-40a8-9ac5-d368604e15b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.579549] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac3103f-e490-4fc2-83c7-522fcd4f6644 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.623749] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3eeeca4-91b5-4d7d-8a13-764e17f86ce3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.631601] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317446be-7220-48eb-96b9-0ec026c49841 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.646376] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1174.758992] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781196, 'name': CreateVM_Task, 'duration_secs': 0.636823} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.759180] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1174.759852] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.760021] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.760331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1174.760613] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d7ee611-9722-4aae-8634-26df5b4d9367 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.763338] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.764839] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1174.764839] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d644ba-3ffc-0bf4-2132-d5753c46d3f0" [ 1174.764839] env[68244]: _type = "Task" [ 1174.764839] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.772658] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d644ba-3ffc-0bf4-2132-d5753c46d3f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.174045] env[68244]: ERROR nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [req-e4bd8535-ec8a-44d8-837d-8b89410fc8c8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e4bd8535-ec8a-44d8-837d-8b89410fc8c8"}]} [ 1175.192802] env[68244]: DEBUG nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1175.205366] env[68244]: DEBUG nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1175.205594] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1175.216816] env[68244]: DEBUG nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1175.236209] env[68244]: DEBUG nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 
tempest-ServerActionsTestOtherB-65371887-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1175.236713] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 153 to 154 during operation: update_traits {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1175.277542] env[68244]: DEBUG nova.compute.manager [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Received event network-changed-ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1175.277542] env[68244]: DEBUG nova.compute.manager [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Refreshing instance network info cache due to event network-changed-ffb04675-4c65-4d62-ab42-459a01bb68b5. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1175.277542] env[68244]: DEBUG oslo_concurrency.lockutils [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] Acquiring lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.277542] env[68244]: DEBUG oslo_concurrency.lockutils [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] Acquired lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.277542] env[68244]: DEBUG nova.network.neutron [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Refreshing network info cache for port ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.285213] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d644ba-3ffc-0bf4-2132-d5753c46d3f0, 'name': SearchDatastore_Task, 'duration_secs': 0.035131} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.288470] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.288470] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1175.288611] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.288705] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.288889] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.289829] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed1c1325-5a58-4cdc-ae31-b7f01557736e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.300302] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.300302] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1175.300302] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0823458b-b756-47dc-bfd3-763cfbec4026 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.309092] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1175.309092] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522dc5e8-40e7-e25d-eb3b-a3f9666d23df" [ 1175.309092] env[68244]: _type = "Task" [ 1175.309092] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.319599] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522dc5e8-40e7-e25d-eb3b-a3f9666d23df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.322503] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3876f565-ca28-49e0-bef9-978913a65430 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.282s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.414410] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe9bbd1-cfc9-4c9b-afdb-cbb4458d8901 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.422679] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f904f3f-3af6-4e95-9584-a100e854c524 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.454282] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f690a9e1-6f92-426a-a525-0dcabe42568c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.461301] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0647c760-0015-49e3-a3b2-fe33fceb1b70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.474663] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1175.485583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.485583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.485798] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.485992] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.486176] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.487950] env[68244]: INFO nova.compute.manager [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Terminating instance [ 1175.688450] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.688690] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.821217] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522dc5e8-40e7-e25d-eb3b-a3f9666d23df, 'name': SearchDatastore_Task, 'duration_secs': 0.010197} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.822086] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4180a23-6384-43ee-9869-f9aa3e22ca66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.827339] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1175.827339] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d44d4d-0474-7180-1c26-d1209eeee1a1" [ 1175.827339] env[68244]: _type = "Task" [ 1175.827339] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.835149] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d44d4d-0474-7180-1c26-d1209eeee1a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.991760] env[68244]: DEBUG nova.compute.manager [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1175.992068] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1175.995402] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cde42bd-0c0a-4bfe-bd51-00b8add04f5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.005359] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1176.005632] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c69a625b-5c72-450c-acc0-fdf35a07c285 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.011292] env[68244]: DEBUG nova.scheduler.client.report [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1176.011552] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 154 to 155 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1176.011766] env[68244]: DEBUG nova.compute.provider_tree [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.016459] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1176.016459] env[68244]: value = 
"task-2781197" [ 1176.016459] env[68244]: _type = "Task" [ 1176.016459] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.024439] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.031508] env[68244]: DEBUG nova.network.neutron [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updated VIF entry in instance network info cache for port ffb04675-4c65-4d62-ab42-459a01bb68b5. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.031897] env[68244]: DEBUG nova.network.neutron [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updating instance_info_cache with network_info: [{"id": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "address": "fa:16:3e:e6:6f:21", "network": {"id": "f3ea2693-a0b2-4d06-8d5d-cee6e9b8dfc4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1228942130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d5a65317c2844988b2bed143e0529ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffb04675-4c", "ovs_interfaceid": "ffb04675-4c65-4d62-ab42-459a01bb68b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.194068] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1176.338261] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d44d4d-0474-7180-1c26-d1209eeee1a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010595} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.338488] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.338784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1176.339358] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-221a100a-f7e7-4fe8-9c3f-3555cda38536 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.346477] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1176.346477] env[68244]: value = "task-2781198" [ 1176.346477] env[68244]: _type = "Task" [ 1176.346477] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.354434] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.518294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.131s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.518828] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1176.522043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.763s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.524307] env[68244]: INFO nova.compute.claims [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1176.537047] env[68244]: DEBUG oslo_concurrency.lockutils [req-bd88220a-62bb-4c2b-805e-c2b65249a29f req-ac9570e1-0fbc-42d2-a422-dea50a28203e service nova] Releasing lock "refresh_cache-da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.537763] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781197, 'name': PowerOffVM_Task, 'duration_secs': 0.185246} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.537885] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.538038] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1176.538706] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60e1f8bd-daf8-4165-b25e-ad9e21edcdf3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.611437] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1176.611730] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Deleting contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1176.611926] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleting the datastore file 
[datastore1] 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.612223] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a91fecf5-a333-46b4-8134-d3133c34bb5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.620230] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1176.620230] env[68244]: value = "task-2781200" [ 1176.620230] env[68244]: _type = "Task" [ 1176.620230] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.628697] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.723048] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.856085] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473065} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.856355] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1176.856571] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1176.856992] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3994b82b-03da-4c4a-83ab-3b48d7a86a4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.862698] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1176.862698] env[68244]: value = "task-2781201" [ 1176.862698] env[68244]: _type = "Task" [ 1176.862698] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.869741] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.032891] env[68244]: DEBUG nova.compute.utils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1177.036378] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1177.036574] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1177.089498] env[68244]: DEBUG nova.policy [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '813e863e39a449dd915ef45aa553cdab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '207109eb01bd42b081cc66385789ab80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1177.129402] env[68244]: DEBUG oslo_vmware.api [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345485} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.129657] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1177.129847] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Deleted contents of the VM from datastore datastore1 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1177.130043] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1177.130225] env[68244]: INFO nova.compute.manager [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1177.130469] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.130656] env[68244]: DEBUG nova.compute.manager [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1177.130771] env[68244]: DEBUG nova.network.neutron [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1177.375806] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063778} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.376583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1177.377151] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b77a202-6cc3-4931-b65a-6ae24f66f8b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.402024] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.402519] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c62b685-922c-4eb7-b680-944346898bd3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.426888] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1177.426888] env[68244]: value = "task-2781202" [ 1177.426888] env[68244]: _type = "Task" [ 1177.426888] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.431016] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Successfully created port: 42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1177.439598] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781202, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.537624] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1177.638480] env[68244]: DEBUG nova.compute.manager [req-9b497305-2c62-48d9-bd19-d249a2c46bc7 req-25f26dec-7f2a-424e-8edf-b2be929c53ae service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Received event network-vif-deleted-57ce8ddd-0a20-4416-bf55-acd66870ad00 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1177.638783] env[68244]: INFO nova.compute.manager [req-9b497305-2c62-48d9-bd19-d249a2c46bc7 req-25f26dec-7f2a-424e-8edf-b2be929c53ae service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Neutron deleted interface 57ce8ddd-0a20-4416-bf55-acd66870ad00; detaching it from the instance and deleting it from the info cache [ 1177.639051] env[68244]: DEBUG nova.network.neutron [req-9b497305-2c62-48d9-bd19-d249a2c46bc7 req-25f26dec-7f2a-424e-8edf-b2be929c53ae service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.705802] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c53a47-f1eb-4450-9dd1-b9f5fb1ab7dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.714565] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6981b0-2a2a-4823-9f14-e8f4e364a54e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.750673] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ab21f1-af0c-4521-827c-bc8ad231e132 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.758181] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d246bb6b-9540-44a1-a9d9-e6746eea8341 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.771951] env[68244]: DEBUG nova.compute.provider_tree [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.938915] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781202, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.110037] env[68244]: DEBUG nova.network.neutron [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.141687] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09289795-dcd6-49fb-9446-ab07b0e40103 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.150918] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9714f554-ffe7-4ef6-a478-419baca6f4ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.179959] env[68244]: DEBUG nova.compute.manager [req-9b497305-2c62-48d9-bd19-d249a2c46bc7 req-25f26dec-7f2a-424e-8edf-b2be929c53ae service nova] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Detach interface failed, port_id=57ce8ddd-0a20-4416-bf55-acd66870ad00, reason: Instance 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1178.274898] env[68244]: DEBUG nova.scheduler.client.report [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1178.437861] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.551126] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1178.577250] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.577506] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.577662] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.577841] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.577999] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.578165] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.578428] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.578652] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.578843] env[68244]: DEBUG nova.virt.hardware [None 
req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.579018] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.579192] env[68244]: DEBUG nova.virt.hardware [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.580127] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fce259-ea60-4ca2-ba76-20b557f4aaeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.588060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efab052-5632-479a-9bca-447ffa54d944 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.612142] env[68244]: INFO nova.compute.manager [-] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Took 1.48 seconds to deallocate network for instance. [ 1178.780048] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.780695] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1178.783910] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.021s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.785367] env[68244]: INFO nova.compute.claims [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.858363] env[68244]: DEBUG nova.compute.manager [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Received event network-vif-plugged-42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1178.858667] env[68244]: DEBUG oslo_concurrency.lockutils [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] Acquiring lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.858967] env[68244]: DEBUG oslo_concurrency.lockutils [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.859622] env[68244]: DEBUG oslo_concurrency.lockutils [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.859622] env[68244]: DEBUG nova.compute.manager [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] No waiting events found dispatching network-vif-plugged-42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1178.859622] env[68244]: WARNING nova.compute.manager [req-d18112d6-8f55-4990-9725-add17b15f170 req-e7ffc95b-f826-4173-9602-6f19bc2bf000 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Received unexpected event network-vif-plugged-42390128-dc00-4c43-bb63-04d49b817a2f for instance with vm_state building and task_state spawning. [ 1178.939507] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781202, 'name': ReconfigVM_Task, 'duration_secs': 1.065377} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.939795] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Reconfigured VM instance instance-00000063 to attach disk [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61/b036365a-87d7-44ea-b439-80f1fe0c5f61.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.940509] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05d14658-6460-4ec3-bd56-eec69262518c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.943226] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Successfully updated port: 42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.946806] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1178.946806] env[68244]: value = "task-2781203" [ 1178.946806] env[68244]: _type = "Task" [ 1178.946806] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.954762] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781203, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.118325] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.292224] env[68244]: DEBUG nova.compute.utils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1179.294166] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1179.294166] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1179.366448] env[68244]: DEBUG nova.policy [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1179.445398] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.445648] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.445715] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.457123] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781203, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.752856] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Successfully created port: 35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1179.797728] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1179.942301] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4985e4b-5b6c-4d5f-a068-902958734cee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.954401] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40adc252-adb9-4dd2-9f31-4cc937307b4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.962524] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781203, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.986708] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1179.989084] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3fd654-a0e9-4ea0-bdf8-62a6261604cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.996300] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f0dee-1305-41e9-95f9-a23f02cadf65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.010085] env[68244]: DEBUG nova.compute.provider_tree [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.111188] env[68244]: DEBUG nova.network.neutron [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updating instance_info_cache with network_info: [{"id": "42390128-dc00-4c43-bb63-04d49b817a2f", "address": "fa:16:3e:22:00:0c", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap42390128-dc", "ovs_interfaceid": "42390128-dc00-4c43-bb63-04d49b817a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.460646] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781203, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.514649] env[68244]: DEBUG nova.scheduler.client.report [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.614092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.614430] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Instance network_info: |[{"id": "42390128-dc00-4c43-bb63-04d49b817a2f", "address": "fa:16:3e:22:00:0c", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42390128-dc", "ovs_interfaceid": "42390128-dc00-4c43-bb63-04d49b817a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1180.614841] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 
tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:00:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42390128-dc00-4c43-bb63-04d49b817a2f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.622491] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.622704] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.622931] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcb2f418-fe68-4c35-9be1-a2cca621fb3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.643518] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.643518] env[68244]: value = "task-2781204" [ 1180.643518] env[68244]: _type = "Task" [ 1180.643518] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.652294] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781204, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.807282] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1180.835626] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1180.835872] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1180.836043] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1180.836229] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1180.836376] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1180.836522] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1180.836730] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1180.836890] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1180.837094] 
env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1180.837240] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1180.837412] env[68244]: DEBUG nova.virt.hardware [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1180.838312] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbdad5c-9a82-4069-a528-ed30631c9829 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.845954] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f48f9e-3539-4017-9823-57dc7969f456 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.884889] env[68244]: DEBUG nova.compute.manager [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Received event network-changed-42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1180.885024] env[68244]: DEBUG nova.compute.manager [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Refreshing instance network info cache due to event network-changed-42390128-dc00-4c43-bb63-04d49b817a2f. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1180.885288] env[68244]: DEBUG oslo_concurrency.lockutils [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] Acquiring lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.885440] env[68244]: DEBUG oslo_concurrency.lockutils [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] Acquired lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.885600] env[68244]: DEBUG nova.network.neutron [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Refreshing network info cache for port 42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.961290] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781203, 'name': Rename_Task, 'duration_secs': 1.637285} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.961573] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.961930] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44f79bd7-97e1-4211-aefb-dff3bd7d9ffa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.968707] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1180.968707] env[68244]: value = "task-2781205" [ 1180.968707] env[68244]: _type = "Task" [ 1180.968707] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.976139] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.020239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.020835] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.023749] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.301s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.025238] env[68244]: INFO nova.compute.claims [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1181.157459] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781204, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.206846] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Successfully updated port: 35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1181.484963] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781205, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.530017] env[68244]: DEBUG nova.compute.utils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1181.534038] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1181.534223] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.575680] env[68244]: DEBUG nova.policy [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c1b19dbdda14a3a9573669bcc82de67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a680ba4b4d446ecbe096355f9eb3b7b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1181.653691] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781204, 'name': CreateVM_Task, 'duration_secs': 0.65419} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.653826] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1181.654540] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.654706] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.655048] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1181.656010] env[68244]: DEBUG nova.network.neutron [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updated VIF entry in instance network info cache for port 42390128-dc00-4c43-bb63-04d49b817a2f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1181.656319] env[68244]: DEBUG nova.network.neutron [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updating instance_info_cache with network_info: [{"id": "42390128-dc00-4c43-bb63-04d49b817a2f", "address": "fa:16:3e:22:00:0c", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42390128-dc", "ovs_interfaceid": "42390128-dc00-4c43-bb63-04d49b817a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.657435] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1c61ce9-6608-45df-8ae0-5ace5edae3f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.662084] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1181.662084] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52332c79-1af3-ed78-42ae-47a460195149" [ 1181.662084] env[68244]: _type = "Task" [ 1181.662084] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.669850] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52332c79-1af3-ed78-42ae-47a460195149, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.708894] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.709059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.709219] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.835889] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Successfully created port: 8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1181.980067] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781205, 'name': PowerOnVM_Task, 'duration_secs': 0.541866} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.980380] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.980586] env[68244]: DEBUG nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.981407] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702e602c-2865-4ae3-85bb-82b7b675f0f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.034999] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.160871] env[68244]: DEBUG oslo_concurrency.lockutils [req-17fe0206-7f37-4620-afc5-21b19947439c req-2c87eecc-84de-49cc-9b56-a93faa8cc94c service nova] Releasing lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.174149] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52332c79-1af3-ed78-42ae-47a460195149, 'name': SearchDatastore_Task, 'duration_secs': 0.008786} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.175153] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.175376] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.175613] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.175762] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.175974] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.176700] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc5404e-107e-4c2c-84ee-cc7b9e7ec67e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.179151] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7598222-46ae-41d1-9e37-4ddd10b43246 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.187095] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-901e114f-2e1a-427b-b4c8-7a46a4b6b0d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.192027] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.192201] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1182.216254] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65dda18e-40a1-4aa0-a1c6-f1a09bca8263 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.221101] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bc2330-0d14-4472-b293-31265322f9c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.226602] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1182.226602] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d4d089-13e3-f845-6f9f-80ed6e186691" [ 1182.226602] env[68244]: _type = "Task" [ 1182.226602] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.232129] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112acc2e-dc3a-45d4-9c77-e0a0f14418c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.240045] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d4d089-13e3-f845-6f9f-80ed6e186691, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.247939] env[68244]: DEBUG nova.compute.provider_tree [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.249808] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1182.387991] env[68244]: DEBUG nova.network.neutron [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.492441] env[68244]: INFO nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] bringing vm to original state: 'stopped' [ 1182.738479] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d4d089-13e3-f845-6f9f-80ed6e186691, 'name': SearchDatastore_Task, 'duration_secs': 0.020605} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.739265] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e932d524-3824-4dad-9390-a682e4f64ce6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.744662] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1182.744662] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a2c2d-24a3-ac62-a622-73a256f63afd" [ 1182.744662] env[68244]: _type = "Task" [ 1182.744662] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.752752] env[68244]: DEBUG nova.scheduler.client.report [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.755785] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a2c2d-24a3-ac62-a622-73a256f63afd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.891303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.891641] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Instance network_info: |[{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1182.892078] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:25:8e:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35117f70-8f49-457b-b347-f4aff8b3b1b3', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1182.899965] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1182.900566] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1182.900834] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4eb43ecf-2f2a-4c8f-b98d-540b52530e16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.919241] env[68244]: DEBUG nova.compute.manager [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-plugged-35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1182.919454] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.919658] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.919864] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.920044] env[68244]: DEBUG nova.compute.manager [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] No waiting events found dispatching network-vif-plugged-35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1182.920254] env[68244]: WARNING nova.compute.manager [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received unexpected event network-vif-plugged-35117f70-8f49-457b-b347-f4aff8b3b1b3 for instance with vm_state building and task_state spawning. 
[ 1182.920453] env[68244]: DEBUG nova.compute.manager [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-changed-35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1182.920610] env[68244]: DEBUG nova.compute.manager [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing instance network info cache due to event network-changed-35117f70-8f49-457b-b347-f4aff8b3b1b3. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1182.920809] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.920956] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.921120] env[68244]: DEBUG nova.network.neutron [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing network info cache for port 35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1182.927608] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1182.927608] env[68244]: value = "task-2781206" [ 1182.927608] env[68244]: _type = "Task" [ 1182.927608] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.936445] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781206, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.048316] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.075755] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.076014] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.076177] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.076357] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.076501] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.076646] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.076847] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.077015] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 
tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.077189] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.077360] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.077532] env[68244]: DEBUG nova.virt.hardware [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.078805] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b5694f-7e33-42dc-b386-a4dca4a9dd7f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.086918] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b572e07-2917-48d3-9a27-c1222fa7cf96 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.197054] env[68244]: DEBUG nova.compute.manager [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Received event network-vif-plugged-8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1183.197054] env[68244]: DEBUG oslo_concurrency.lockutils [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service nova] Acquiring lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.197199] env[68244]: DEBUG oslo_concurrency.lockutils [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service nova] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.197370] env[68244]: DEBUG oslo_concurrency.lockutils [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service nova] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.197548] env[68244]: DEBUG nova.compute.manager [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service 
nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] No waiting events found dispatching network-vif-plugged-8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.197716] env[68244]: WARNING nova.compute.manager [req-6bde414f-d27c-41f5-af14-bd19db652016 req-02747eef-a2d6-411c-8e8e-d482cd6cfb25 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Received unexpected event network-vif-plugged-8419e732-58f6-46b8-8230-9288039f5ac7 for instance with vm_state building and task_state spawning. [ 1183.255344] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529a2c2d-24a3-ac62-a622-73a256f63afd, 'name': SearchDatastore_Task, 'duration_secs': 0.023069} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.255622] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.255885] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 77ba8e47-10bb-4630-bd89-067f5ad7bad9/77ba8e47-10bb-4630-bd89-067f5ad7bad9.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1183.256170] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da53b382-1fa2-432e-ac40-1bcc89ad7ea0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.258455] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.259111] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1183.261920] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.144s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.262149] env[68244]: DEBUG nova.objects.instance [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'resources' on Instance uuid 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.268296] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1183.268296] env[68244]: value = "task-2781207" [ 1183.268296] env[68244]: _type = "Task" [ 1183.268296] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.275841] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.279146] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Successfully updated port: 8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1183.437907] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781206, 'name': CreateVM_Task, 'duration_secs': 0.316468} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.438204] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1183.438914] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.439135] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.439482] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1183.439764] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38fc257d-a4af-40cb-9762-af3d303b6b63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.445277] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1183.445277] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae5ec7-ebbd-ffea-19ae-ffb4f6b03873" [ 1183.445277] env[68244]: _type = "Task" [ 1183.445277] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.454450] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae5ec7-ebbd-ffea-19ae-ffb4f6b03873, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.499698] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.500077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.500217] env[68244]: DEBUG nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.501282] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377d856f-f6f2-4378-bbe5-a56a31f25f5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.508507] env[68244]: DEBUG nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1183.768860] env[68244]: DEBUG nova.compute.utils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1183.770165] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1183.770165] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1183.781769] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.409904} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.783719] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 77ba8e47-10bb-4630-bd89-067f5ad7bad9/77ba8e47-10bb-4630-bd89-067f5ad7bad9.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.783719] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1183.783719] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7da149a6-bf6d-44f7-ae72-c2a3a3131c46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.791732] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.792113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquired lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.792367] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.803730] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1183.803730] env[68244]: value = "task-2781208" [ 1183.803730] env[68244]: _type = "Task" [ 1183.803730] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.813421] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781208, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.828175] env[68244]: DEBUG nova.policy [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '395d6679a62746ef8ed6f6f581c22944', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd41b4d274faa4f5a8951d39fa0d0c714', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1183.872960] env[68244]: DEBUG nova.network.neutron [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updated VIF entry in instance network info cache for port 35117f70-8f49-457b-b347-f4aff8b3b1b3. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1183.873301] env[68244]: DEBUG nova.network.neutron [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.953396] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64edde21-1e93-4246-a622-6b622323fe8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.959612] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae5ec7-ebbd-ffea-19ae-ffb4f6b03873, 'name': SearchDatastore_Task, 'duration_secs': 0.059391} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.960276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.960512] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1183.960760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.960945] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.961147] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.961419] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e4a90b8-3111-46c1-9826-3ab53a235c37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.966497] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cb2033-170a-4bfd-ab19-aa11c23ce613 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.972611] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.972812] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1184.000322] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b382173f-c75b-4436-ac0d-f2714a2689a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.003559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8086d101-ac00-4305-9248-36c6c7e5d5bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.010540] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1184.010540] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52086fdc-e1a0-1b65-ac2f-19e34855b773" [ 1184.010540] env[68244]: _type = "Task" [ 1184.010540] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.016178] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8e87d9-c6dc-4149-9de4-2788f80e19ee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.020431] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1184.021494] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddb62d2d-efec-4128-9654-b91071fb1ef2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.029751] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52086fdc-e1a0-1b65-ac2f-19e34855b773, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.039080] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1184.039080] env[68244]: value = "task-2781209" [ 1184.039080] env[68244]: _type = "Task" [ 1184.039080] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.039515] env[68244]: DEBUG nova.compute.provider_tree [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.048496] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781209, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.228946] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Successfully created port: 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1184.274084] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1184.313986] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074983} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.314640] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1184.315124] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d801cd4a-b036-4815-891a-4c2efdc6def7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.341815] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 77ba8e47-10bb-4630-bd89-067f5ad7bad9/77ba8e47-10bb-4630-bd89-067f5ad7bad9.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.343529] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.345454] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a4eee89-3463-416c-adf1-68aaaa79e033 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.367256] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1184.367256] env[68244]: value = "task-2781210" [ 1184.367256] env[68244]: _type = "Task" [ 1184.367256] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.377306] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781210, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.379885] env[68244]: DEBUG oslo_concurrency.lockutils [req-58e93c17-a312-4f36-bf17-fa5c6fd4050d req-6bd5403c-b86c-4948-893e-e2b599315b54 service nova] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.499950] env[68244]: DEBUG nova.network.neutron [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Updating instance_info_cache with network_info: [{"id": "8419e732-58f6-46b8-8230-9288039f5ac7", "address": "fa:16:3e:1b:7d:a7", "network": {"id": "acb83eae-33c0-411d-a528-914e274b554a", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1981454213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a680ba4b4d446ecbe096355f9eb3b7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "753b69c3-4ef5-44a1-80cf-eab4edbe0dd5", "external-id": "nsx-vlan-transportzone-878", "segmentation_id": 878, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8419e732-58", "ovs_interfaceid": "8419e732-58f6-46b8-8230-9288039f5ac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.524874] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52086fdc-e1a0-1b65-ac2f-19e34855b773, 'name': SearchDatastore_Task, 'duration_secs': 0.031471} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.525850] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70925b0e-ae2b-481c-b32f-6f493c167ebd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.531418] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1184.531418] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521459b8-8682-e06f-0aea-60f768de91cb" [ 1184.531418] env[68244]: _type = "Task" [ 1184.531418] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.539534] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521459b8-8682-e06f-0aea-60f768de91cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.552278] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781209, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.573702] env[68244]: DEBUG nova.scheduler.client.report [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 155 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1184.573993] env[68244]: DEBUG nova.compute.provider_tree [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 155 to 156 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1184.574209] env[68244]: DEBUG nova.compute.provider_tree [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.878364] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781210, 'name': ReconfigVM_Task, 'duration_secs': 0.290395} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.878747] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 77ba8e47-10bb-4630-bd89-067f5ad7bad9/77ba8e47-10bb-4630-bd89-067f5ad7bad9.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.879480] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f86f038-f9e3-43cb-b215-41a418669bcd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.887848] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1184.887848] env[68244]: value = "task-2781211" [ 1184.887848] env[68244]: _type = "Task" [ 1184.887848] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.898080] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781211, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.003432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Releasing lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.003785] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Instance network_info: |[{"id": "8419e732-58f6-46b8-8230-9288039f5ac7", "address": "fa:16:3e:1b:7d:a7", "network": {"id": "acb83eae-33c0-411d-a528-914e274b554a", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1981454213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a680ba4b4d446ecbe096355f9eb3b7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "753b69c3-4ef5-44a1-80cf-eab4edbe0dd5", "external-id": "nsx-vlan-transportzone-878", "segmentation_id": 878, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8419e732-58", "ovs_interfaceid": "8419e732-58f6-46b8-8230-9288039f5ac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.004294] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:7d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '753b69c3-4ef5-44a1-80cf-eab4edbe0dd5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8419e732-58f6-46b8-8230-9288039f5ac7', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.012036] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Creating folder: Project (0a680ba4b4d446ecbe096355f9eb3b7b). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1185.012143] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72ef1474-1b20-41ab-a95a-93a6b2da6d47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.021917] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Created folder: Project (0a680ba4b4d446ecbe096355f9eb3b7b) in parent group-v558876. [ 1185.022129] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Creating folder: Instances. Parent ref: group-v559159. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1185.022381] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae4cc94c-6d66-4b28-b4e5-3325cf44c976 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.033027] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Created folder: Instances in parent group-v559159. [ 1185.033027] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.033027] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.033027] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1b2ba32-3e51-4d2b-90b5-d8b983239860 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.059999] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]521459b8-8682-e06f-0aea-60f768de91cb, 'name': SearchDatastore_Task, 'duration_secs': 0.018448} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.064753] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.065206] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 375c4371-3537-4a94-987e-0f6f72a690b8/375c4371-3537-4a94-987e-0f6f72a690b8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1185.065622] env[68244]: DEBUG oslo_vmware.api [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781209, 'name': PowerOffVM_Task, 'duration_secs': 0.893279} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.065936] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.065936] env[68244]: value = "task-2781214" [ 1185.065936] env[68244]: _type = "Task" [ 1185.065936] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.066334] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42dd667c-8fe9-4ca1-8bab-9a18227010c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.068712] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.068942] env[68244]: DEBUG nova.compute.manager [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.069764] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac2def-23ea-45ea-b65c-dc2f9a77c849 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.081795] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.084062] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781214, 'name': CreateVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.088852] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1185.088852] env[68244]: value = "task-2781215" [ 1185.088852] env[68244]: _type = "Task" [ 1185.088852] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.098995] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781215, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.121120] env[68244]: INFO nova.scheduler.client.report [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted allocations for instance 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33 [ 1185.229944] env[68244]: DEBUG nova.compute.manager [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Received event network-changed-8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1185.230199] env[68244]: DEBUG nova.compute.manager [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Refreshing instance network info cache due to event network-changed-8419e732-58f6-46b8-8230-9288039f5ac7. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1185.230389] env[68244]: DEBUG oslo_concurrency.lockutils [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] Acquiring lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.230535] env[68244]: DEBUG oslo_concurrency.lockutils [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] Acquired lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.230696] env[68244]: DEBUG nova.network.neutron [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Refreshing network info cache for port 8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.285401] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1185.315292] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1185.315557] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1185.315712] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1185.315909] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1185.316055] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1185.316206] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1185.316501] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1185.316790] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1185.316891] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1185.317021] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1185.317638] env[68244]: DEBUG nova.virt.hardware [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1185.318234] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb494de0-5f81-4063-b4af-dcb9e313e232 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.327237] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f27b845-4213-47ed-85ce-c4fd4aa5e0f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.397840] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781211, 'name': Rename_Task, 'duration_secs': 0.151485} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.398175] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1185.398451] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-081510b4-7258-484e-859f-001ed135b4aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.406451] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1185.406451] env[68244]: value = "task-2781216" [ 1185.406451] env[68244]: _type = "Task" [ 1185.406451] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.416259] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781216, 'name': PowerOnVM_Task} progress is 0%. 
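The nova.virt.hardware entries above derive a guest CPU topology for the 1-vCPU m1.nano flavor: limits of 0 are treated as unbounded (capped at 65536), there is no preferred topology, and the only combination whose product equals one vCPU is 1 socket x 1 core x 1 thread. The snippet below is a simplified, illustrative reimplementation of that enumeration, not Nova's actual hardware.py code.

# Simplified illustration of the topology enumeration logged above.
UNLIMITED = 65536   # substituted when a flavor/image limit is 0/unset

def possible_topologies(vcpus, max_sockets=UNLIMITED, max_cores=UNLIMITED,
                        max_threads=UNLIMITED):
    """Yield (sockets, cores, threads) whose product equals vcpus."""
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- the single topology logged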
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.578550] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781214, 'name': CreateVM_Task, 'duration_secs': 0.474828} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.579050] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1185.579416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.579582] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.579891] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1185.580157] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7506b43d-6383-48c9-8a09-a77545149b3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.584511] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1185.584511] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527c47f4-7f64-afa0-6ccc-d02c040812a4" [ 1185.584511] env[68244]: _type = "Task" [ 1185.584511] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.594378] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527c47f4-7f64-afa0-6ccc-d02c040812a4, 'name': SearchDatastore_Task} progress is 0%. 
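The lock lines above show the spawn path serializing on the per-image cache path "[datastore2] devstack-image-cache_base/<image-id>" before probing the datastore, so concurrent spawns of the same image do not race to populate the cache. A hedged sketch of that lock-then-check shape follows; lockutils.lock() is the real oslo.concurrency context manager, while image_cached, download_image and copy_cached_image are hypothetical placeholders for the datastore operations.

# Sketch of the image-cache serialization visible in the log.
from oslo_concurrency import lockutils

def image_cached(datastore, image_id):                      # hypothetical
    raise NotImplementedError

def download_image(datastore, image_id):                    # hypothetical
    raise NotImplementedError

def copy_cached_image(datastore, image_id, instance_path):  # hypothetical
    raise NotImplementedError

def fetch_image_if_missing(datastore, image_id, instance_path):
    cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}"
    # Concurrent requests for the same image wait here instead of racing,
    # matching the 'Acquiring lock "[datastore2] devstack-image-cache_base/..."' lines.
    with lockutils.lock(cache_path):
        if not image_cached(datastore, image_id):
            download_image(datastore, image_id)
    copy_cached_image(datastore, image_id, instance_path)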
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.597323] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.097s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.602915] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454102} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.603093] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 375c4371-3537-4a94-987e-0f6f72a690b8/375c4371-3537-4a94-987e-0f6f72a690b8.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1185.603306] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1185.603534] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8fa830a-2e98-4f53-bfbf-b60bb6670e7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.609190] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1185.609190] env[68244]: value = "task-2781217" [ 1185.609190] env[68244]: _type = "Task" [ 1185.609190] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.617430] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781217, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.628333] env[68244]: DEBUG oslo_concurrency.lockutils [None req-613e43f9-b84d-4299-857c-27ea9395368e tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "1ba4f3f5-726e-482f-a821-d2ee1bbd4c33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.143s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.918166] env[68244]: DEBUG oslo_vmware.api [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781216, 'name': PowerOnVM_Task, 'duration_secs': 0.487481} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.918446] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1185.918651] env[68244]: INFO nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1185.918833] env[68244]: DEBUG nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.919625] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6d2ecc-cdd1-4b1f-b69c-724a15001631 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.948730] env[68244]: DEBUG nova.network.neutron [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Updated VIF entry in instance network info cache for port 8419e732-58f6-46b8-8230-9288039f5ac7. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1185.949163] env[68244]: DEBUG nova.network.neutron [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Updating instance_info_cache with network_info: [{"id": "8419e732-58f6-46b8-8230-9288039f5ac7", "address": "fa:16:3e:1b:7d:a7", "network": {"id": "acb83eae-33c0-411d-a528-914e274b554a", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1981454213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a680ba4b4d446ecbe096355f9eb3b7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "753b69c3-4ef5-44a1-80cf-eab4edbe0dd5", "external-id": "nsx-vlan-transportzone-878", "segmentation_id": 878, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8419e732-58", "ovs_interfaceid": "8419e732-58f6-46b8-8230-9288039f5ac7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.055488] env[68244]: DEBUG nova.compute.manager [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1186.055721] env[68244]: DEBUG oslo_concurrency.lockutils [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.055932] env[68244]: DEBUG oslo_concurrency.lockutils [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.056124] env[68244]: DEBUG oslo_concurrency.lockutils [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.056294] env[68244]: DEBUG nova.compute.manager [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] No waiting events found dispatching network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1186.056463] env[68244]: WARNING nova.compute.manager [req-1de8b9e1-115c-49c3-a9aa-42ee9fff2af3 req-a4f1d530-e1ed-4a55-b312-35f61c552765 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received unexpected event network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e for instance with vm_state building and task_state spawning. [ 1186.095245] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527c47f4-7f64-afa0-6ccc-d02c040812a4, 'name': SearchDatastore_Task, 'duration_secs': 0.012385} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.095513] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.096362] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.096362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.096362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.096362] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.096597] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c68b8c2-5aca-4b2d-9506-28f1f425ec91 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.105526] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Created directory with 
path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.105715] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.106693] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.106912] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.107099] env[68244]: DEBUG nova.objects.instance [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1186.110905] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-350d6039-84bc-47ad-8e2b-877e924b713a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.119628] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1186.119628] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520aaa11-9067-b2bd-053c-795233811ecd" [ 1186.119628] env[68244]: _type = "Task" [ 1186.119628] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.122628] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063712} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.127236] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1186.127236] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ab3786-4a39-4542-b96a-c5809732bd60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.134047] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520aaa11-9067-b2bd-053c-795233811ecd, 'name': SearchDatastore_Task, 'duration_secs': 0.008832} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.155896] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 375c4371-3537-4a94-987e-0f6f72a690b8/375c4371-3537-4a94-987e-0f6f72a690b8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.156317] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bb89706-ab9a-4723-b30e-ba5a5869e195 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.158966] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7be53b76-bc4b-4e78-ac08-a3c458c85933 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.180743] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1186.180743] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524277e7-8871-4445-fb3e-e4152ead56b8" [ 1186.180743] env[68244]: _type = "Task" [ 1186.180743] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.185079] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1186.185079] env[68244]: value = "task-2781218" [ 1186.185079] env[68244]: _type = "Task" [ 1186.185079] env[68244]: } to complete. 
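Taken together, the tasks in the surrounding entries trace the disk side of a VMware spawn: copy the cached image VMDK into the instance folder (CopyVirtualDisk_Task), grow it to the flavor's root size (ExtendVirtualDisk_Task, 1048576 KB = 1 GiB here), attach it with a reconfigure (ReconfigVM_Task), rename, then power on (PowerOnVM_Task). The outline below strings those steps together in that order; every helper is a hypothetical wrapper that submits the named vCenter task and waits for it, not the real nova.virt.vmwareapi code.

# Reconstructed order of vCenter tasks in the spawn path.
def _submit(task_name, **kwargs):
    # Hypothetical stand-in: submit the named vCenter task and block on it.
    print(f"{task_name}: {kwargs}")

def copy_virtual_disk(src, dst):
    _submit("CopyVirtualDisk_Task", src=src, dst=dst)

def extend_virtual_disk(path, size_kb):
    _submit("ExtendVirtualDisk_Task", path=path, size_kb=size_kb)

def attach_disk_to_vm(vm, path):
    _submit("ReconfigVM_Task", vm=vm, disk=path)

def rename_vm(vm):
    _submit("Rename_Task", vm=vm)

def power_on_vm(vm):
    _submit("PowerOnVM_Task", vm=vm)

def prepare_and_boot(datastore, image_id, instance_uuid, root_gb):
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    root = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    copy_virtual_disk(cached, root)
    extend_virtual_disk(root, root_gb * 1024 * 1024)   # root_gb=1 -> 1048576 KB
    attach_disk_to_vm(instance_uuid, root)
    rename_vm(instance_uuid)
    power_on_vm(instance_uuid)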
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.191581] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524277e7-8871-4445-fb3e-e4152ead56b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.196325] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781218, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.230545] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Successfully updated port: 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1186.319298] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.319565] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.436667] env[68244]: INFO nova.compute.manager [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Took 13.07 seconds to build instance. [ 1186.452318] env[68244]: DEBUG oslo_concurrency.lockutils [req-ebdd099d-3637-40db-8d2f-02a3d10614e5 req-06eee104-2dc7-4b1f-9d29-be59dc7d6522 service nova] Releasing lock "refresh_cache-8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.695528] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524277e7-8871-4445-fb3e-e4152ead56b8, 'name': SearchDatastore_Task, 'duration_secs': 0.01045} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.698552] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.698842] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c/8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.699157] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781218, 'name': ReconfigVM_Task, 'duration_secs': 0.27917} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.699363] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef126836-3372-4187-87b7-1a31ae12e90c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.701243] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 375c4371-3537-4a94-987e-0f6f72a690b8/375c4371-3537-4a94-987e-0f6f72a690b8.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1186.701911] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58a89f59-89a7-4058-bf94-72e2f3269ba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.711344] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1186.711344] env[68244]: value = "task-2781219" [ 1186.711344] env[68244]: _type = "Task" [ 1186.711344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.711608] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1186.711608] env[68244]: value = "task-2781220" [ 1186.711608] env[68244]: _type = "Task" [ 1186.711608] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.723091] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.731414] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781220, 'name': Rename_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.731550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.731620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.732604] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1186.823572] env[68244]: DEBUG nova.compute.utils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1186.938614] env[68244]: DEBUG oslo_concurrency.lockutils [None req-425f25a9-8cc0-4a17-a15e-3bf1e93f8595 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.577s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.124776] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2cc7e89a-5fcb-4a93-a7b0-a5e1d7dcb989 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.228904] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458206} completed 
successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.229319] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781220, 'name': Rename_Task, 'duration_secs': 0.142787} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.229588] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c/8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1187.229811] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1187.230087] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1187.230317] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5355629-e01f-4771-a613-5609aa41a515 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.233838] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4985af6d-98f2-4c47-9824-6dd2cd09d803 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.233838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.233838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.234115] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.234609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.234609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.239122] env[68244]: INFO nova.compute.manager [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Terminating instance [ 1187.247743] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1187.247743] env[68244]: value = "task-2781222" [ 1187.247743] env[68244]: _type = "Task" [ 1187.247743] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.248010] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1187.248010] env[68244]: value = "task-2781221" [ 1187.248010] env[68244]: _type = "Task" [ 1187.248010] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.265470] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781222, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.268312] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.296341] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1187.327854] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.492773] env[68244]: DEBUG nova.network.neutron [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.746859] env[68244]: DEBUG nova.compute.manager [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1187.747174] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1187.748145] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffbd9b3-a5ed-49fe-bf17-f8c5ac98c928 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.762421] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.768115] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acf20995-baca-430e-b808-3b9d13e844d0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.769711] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.769935] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781222, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.828707] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.828956] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.829157] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] b036365a-87d7-44ea-b439-80f1fe0c5f61 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.829436] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6a29fa9-7351-4de4-8a1e-8f3541db1168 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.835727] env[68244]: DEBUG oslo_vmware.api [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1187.835727] env[68244]: value = "task-2781224" [ 1187.835727] env[68244]: _type = "Task" [ 1187.835727] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.844413] env[68244]: DEBUG oslo_vmware.api [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781224, 'name': DeleteDatastoreFile_Task} progress is 0%. 
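The terminate path logged just above runs the spawn sequence roughly in reverse: power off the VM, unregister it from vCenter (UnregisterVM, invoked without a task wait in the log), then delete the instance directory on the datastore (DeleteDatastoreFile_Task). A hedged outline with hypothetical wrappers:

# Reconstructed order of the destroy path seen in the log; the helpers are
# hypothetical stand-ins for the corresponding vCenter operations.
def power_off_vm(vm_ref):
    print("PowerOffVM_Task", vm_ref)

def unregister_vm(vm_ref):
    print("UnregisterVM", vm_ref)

def delete_datastore_file(path):
    print("DeleteDatastoreFile_Task", path)

def destroy_instance(vm_ref, datastore, instance_uuid, powered_on=True):
    if powered_on:
        power_off_vm(vm_ref)
    unregister_vm(vm_ref)
    # "Deleting contents of the VM from datastore" in the log:
    delete_datastore_file(f"[{datastore}] {instance_uuid}")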
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.995638] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.995981] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance network_info: |[{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1187.996463] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:bb:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '099ae899-d602-45fd-bdcf-deda125a5d3e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1188.004904] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.005139] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1188.005370] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-981d9dd2-3a59-4b44-b4cd-1e0b8c53a51d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.027927] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1188.027927] env[68244]: value = "task-2781225" [ 1188.027927] env[68244]: _type = "Task" [ 1188.027927] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.036087] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781225, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.091563] env[68244]: DEBUG nova.compute.manager [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1188.091770] env[68244]: DEBUG nova.compute.manager [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing instance network info cache due to event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1188.092232] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.092376] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.092450] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1188.268954] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781222, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.272072] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.67928} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.272341] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.273136] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfc988b-b463-4485-b0c9-1a7e5efa77c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.297322] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c/8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.297647] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ffd7cab-6e8d-4958-8280-4e42ca0d5464 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.317363] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1188.317363] env[68244]: value = "task-2781226" [ 1188.317363] env[68244]: _type = "Task" [ 1188.317363] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.331478] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781226, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.345046] env[68244]: DEBUG oslo_vmware.api [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243637} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.345291] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.345455] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.345672] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.345868] env[68244]: INFO nova.compute.manager [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1188.346130] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.346325] env[68244]: DEBUG nova.compute.manager [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.346419] env[68244]: DEBUG nova.network.neutron [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.398130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.398395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.398625] env[68244]: INFO nova.compute.manager [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Attaching volume 29f1c6a5-12c0-422d-9d39-cae96c725dbd to /dev/sdb [ 1188.435979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250ff00e-bfb1-4081-a070-d6b7498593f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.444162] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27727c41-e19e-4d49-89a0-1dd55947cc37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.458701] env[68244]: DEBUG nova.virt.block_device [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating existing volume attachment record: 601e2618-be1b-440b-99fb-e16ba843b077 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1188.538122] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781225, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.760668] env[68244]: DEBUG oslo_vmware.api [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781222, 'name': PowerOnVM_Task, 'duration_secs': 1.047258} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.761250] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1188.761250] env[68244]: INFO nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Took 7.95 seconds to spawn the instance on the hypervisor. [ 1188.761447] env[68244]: DEBUG nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1188.762318] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e14e4f-221f-4561-aacf-a1c52482714f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.818365] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updated VIF entry in instance network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1188.818871] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.829903] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781226, 
'name': ReconfigVM_Task, 'duration_secs': 0.456511} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.830217] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c/8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1188.831176] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b78bec1-37fb-4b43-b3aa-43f0956125d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.838537] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.838806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.839946] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1188.839946] env[68244]: value = "task-2781230" [ 1188.839946] env[68244]: _type = "Task" [ 1188.839946] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.849444] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781230, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.040887] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781225, 'name': CreateVM_Task, 'duration_secs': 0.531673} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.040974] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1189.041679] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.041881] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.042299] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1189.042575] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb6ad03c-9928-4583-b42c-7cb4de898616 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.047066] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1189.047066] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f2d9eb-3fc4-9276-55e3-89a6d6dd5005" [ 1189.047066] env[68244]: _type = "Task" [ 1189.047066] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.054918] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f2d9eb-3fc4-9276-55e3-89a6d6dd5005, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.157816] env[68244]: DEBUG nova.network.neutron [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.280336] env[68244]: INFO nova.compute.manager [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Took 15.55 seconds to build instance. 
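Editor's note (illustrative, not part of the captured log): the entries above and below repeat one pattern from oslo.vmware's API layer: a vSphere *_Task call is invoked (CreateVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, ...), the caller logs "Waiting for the task ... to complete", progress is polled, and completion is reported with a duration_secs value. The sketch below is a minimal, self-contained Python illustration of that poll-until-complete shape under stated assumptions; it is not the oslo.vmware implementation, and every name in it (TaskInfo, poll_task_until_done, the toy in-memory backend) is hypothetical.

    # Illustrative sketch only -- NOT oslo.vmware code. All names here are
    # hypothetical; the point is the poll loop that produces log lines like
    # "Task: {...} progress is 50%." followed by "completed successfully".
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str      # e.g. "task-2781225"
        state: str        # "running" | "success" | "error"
        progress: int     # 0..100

    def poll_task_until_done(read_task_info, task_id, interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out."""
        start = time.monotonic()
        while True:
            info = read_task_info(task_id)
            elapsed = time.monotonic() - start
            if info.state == "success":
                print(f"Task {task_id} completed successfully "
                      f"(duration_secs={elapsed:.3f})")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            if elapsed > timeout:
                raise TimeoutError(f"Task {task_id} still at "
                                   f"{info.progress}% after {timeout}s")
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(interval)

    # Toy in-memory backend, purely for demonstration.
    _progress = {"task-demo": 0}
    def _fake_read(task_id):
        _progress[task_id] = min(_progress[task_id] + 50, 100)
        state = "success" if _progress[task_id] == 100 else "running"
        return TaskInfo(task_id, state, _progress[task_id])

    poll_task_until_done(_fake_read, "task-demo", interval=0.01)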
[ 1189.321237] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.321467] env[68244]: DEBUG nova.compute.manager [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Received event network-changed-42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1189.321639] env[68244]: DEBUG nova.compute.manager [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Refreshing instance network info cache due to event network-changed-42390128-dc00-4c43-bb63-04d49b817a2f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1189.321847] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Acquiring lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.321987] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Acquired lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.322157] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Refreshing network info cache for port 42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.341038] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1189.353270] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781230, 'name': Rename_Task, 'duration_secs': 0.146719} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.353539] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.353786] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3940da1-3b5f-4c56-9631-066ee103126b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.360282] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1189.360282] env[68244]: value = "task-2781231" [ 1189.360282] env[68244]: _type = "Task" [ 1189.360282] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.368141] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781231, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.556860] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f2d9eb-3fc4-9276-55e3-89a6d6dd5005, 'name': SearchDatastore_Task, 'duration_secs': 0.014284} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.557161] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.557396] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1189.557621] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.557768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.557942] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1189.558213] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0189a86-01c5-402b-953f-e30662e4ecfd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.570860] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1189.571056] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1189.571760] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fd02ea8-d57c-4b70-b20b-cc257c22276f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.576973] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1189.576973] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c17466-f782-aeb8-8471-a81b1c87a9a7" [ 1189.576973] env[68244]: _type = "Task" [ 1189.576973] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.584940] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c17466-f782-aeb8-8471-a81b1c87a9a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.661153] env[68244]: INFO nova.compute.manager [-] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Took 1.31 seconds to deallocate network for instance. [ 1189.782155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1a899d3-0b5a-46dc-aeae-4c2a0c5a1f17 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.073s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.870316] env[68244]: DEBUG oslo_vmware.api [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781231, 'name': PowerOnVM_Task, 'duration_secs': 0.46188} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.871364] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.871662] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.873209] env[68244]: INFO nova.compute.claims [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.875824] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1189.876013] env[68244]: INFO nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Took 6.83 seconds to spawn the instance on the hypervisor. [ 1189.876591] env[68244]: DEBUG nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.877323] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf2a5f4-6ee0-4bee-bd9d-f5aa0e8c0c46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.037062] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updated VIF entry in instance network info cache for port 42390128-dc00-4c43-bb63-04d49b817a2f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1190.037441] env[68244]: DEBUG nova.network.neutron [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updating instance_info_cache with network_info: [{"id": "42390128-dc00-4c43-bb63-04d49b817a2f", "address": "fa:16:3e:22:00:0c", "network": {"id": "f81bb28e-7ee4-40c8-8843-2db3dd765fae", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689269356-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "207109eb01bd42b081cc66385789ab80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42390128-dc", "ovs_interfaceid": "42390128-dc00-4c43-bb63-04d49b817a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.087256] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c17466-f782-aeb8-8471-a81b1c87a9a7, 'name': SearchDatastore_Task, 'duration_secs': 0.02482} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.088050] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1823ece5-2302-4d4e-8887-1375c6d0344e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.093012] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1190.093012] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52962912-1d60-0691-5cdf-2969903e952b" [ 1190.093012] env[68244]: _type = "Task" [ 1190.093012] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.103727] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52962912-1d60-0691-5cdf-2969903e952b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.121713] env[68244]: DEBUG nova.compute.manager [req-6886e754-38cb-47ce-82b6-44bdca23217b req-e599a126-8e82-4edf-b44e-f9396288e8da service nova] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Received event network-vif-deleted-5615ea2e-6d28-4e93-8c36-fce6a7bd1b07 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1190.167494] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.396846] env[68244]: INFO nova.compute.manager [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Took 15.65 seconds to build instance. [ 1190.540484] env[68244]: DEBUG oslo_concurrency.lockutils [req-ab2619be-32c6-48d4-9a85-8904d7f3bef6 req-9319f376-fcbb-45eb-bcb0-9b324a8ef1a2 service nova] Releasing lock "refresh_cache-77ba8e47-10bb-4630-bd89-067f5ad7bad9" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.604469] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52962912-1d60-0691-5cdf-2969903e952b, 'name': SearchDatastore_Task, 'duration_secs': 0.009895} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.604740] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.604990] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1190.605259] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d68d450-9005-407c-ac48-c523a3b5780d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.611602] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1190.611602] env[68244]: value = "task-2781232" [ 1190.611602] env[68244]: _type = "Task" [ 1190.611602] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.618227] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781232, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.899560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8d2557c2-691a-4371-8fcb-bdddb55402a3 tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.163s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.063774] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a9b675-147a-421e-8c68-1e9c63f2a1bd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.073211] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408228a2-6eaa-41c7-8623-c14a65f15ac0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.108127] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c33ff9-83ba-4001-af08-7734b68fc644 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.120318] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6426b8e7-f5de-4c82-8382-311888f12b75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.127414] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781232, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.142487] env[68244]: DEBUG nova.compute.provider_tree [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.623085] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519963} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.623386] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1191.623605] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1191.623851] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-907a609c-ae52-4249-ad16-ba4fb0de56e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.629977] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1191.629977] env[68244]: value = "task-2781234" [ 1191.629977] env[68244]: _type = "Task" [ 1191.629977] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.640426] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781234, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.646607] env[68244]: DEBUG nova.scheduler.client.report [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.745544] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.745796] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.746020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.746375] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.746569] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.748698] env[68244]: INFO nova.compute.manager [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] 
Terminating instance [ 1192.139814] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.410935} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.140130] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.140889] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d833853-db35-46f6-91c7-4bc4b9dc8f44 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.154699] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.155262] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1192.166423] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.167768] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.167983] env[68244]: DEBUG nova.objects.instance [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'resources' on Instance uuid b036365a-87d7-44ea-b439-80f1fe0c5f61 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.168818] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7714f12f-72af-4149-8ee5-5f9271c26226 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.184368] env[68244]: DEBUG nova.compute.manager [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-changed-35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1192.184547] env[68244]: DEBUG nova.compute.manager [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing instance network info cache due to event network-changed-35117f70-8f49-457b-b347-f4aff8b3b1b3. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1192.184749] env[68244]: DEBUG oslo_concurrency.lockutils [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.184887] env[68244]: DEBUG oslo_concurrency.lockutils [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.185056] env[68244]: DEBUG nova.network.neutron [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing network info cache for port 35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1192.193065] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1192.193065] env[68244]: value = "task-2781235" [ 1192.193065] env[68244]: _type = "Task" [ 1192.193065] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.202019] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781235, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.252750] env[68244]: DEBUG nova.compute.manager [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1192.252988] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.253928] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495da864-910d-4244-a846-11e6d4ac8bd2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.261512] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.261775] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40bb7ee5-0874-4488-8ddf-91153477a411 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.268231] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1192.268231] env[68244]: value = "task-2781236" [ 1192.268231] env[68244]: _type = "Task" [ 1192.268231] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.278543] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.668988] env[68244]: DEBUG nova.compute.utils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1192.670450] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1192.670619] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1192.703601] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.717581] env[68244]: DEBUG nova.policy [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f93ab312c1f44d7877c43a7b101cb5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4349b19805a8498392649e1b825d5da7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1192.779397] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781236, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.843912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a76b38d-c7ae-43ec-947f-7a159fa9e83e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.854064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce9717d-4553-49b1-9cfa-3beff6754693 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.887536] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9732513f-7cdf-4164-9c76-af89f0640470 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.895790] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810394e0-f5fc-4802-9b31-d71f934ee8d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.912274] env[68244]: DEBUG nova.compute.provider_tree [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.920760] env[68244]: DEBUG nova.network.neutron [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updated VIF entry in instance network info cache for port 35117f70-8f49-457b-b347-f4aff8b3b1b3. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.921297] env[68244]: DEBUG nova.network.neutron [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.002972] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Successfully created port: 45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1193.009141] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1193.009371] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559164', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'name': 'volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dfe017bb-d860-4da6-abe5-7e8d7a7dd05a', 'attached_at': '', 'detached_at': '', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'serial': '29f1c6a5-12c0-422d-9d39-cae96c725dbd'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1193.010229] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf74d19-1c23-4296-a185-a1fd022f3926 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.027353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08bd4c8-16c3-4e59-b283-a6bea1e2b8ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.052285] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd/volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.052592] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a628ae34-d9ee-4fad-9d95-4ba14c75ea2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.072013] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1193.072013] env[68244]: value = "task-2781237" [ 1193.072013] env[68244]: _type = "Task" [ 1193.072013] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.080914] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.081317] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781237, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.081551] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.174251] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1193.204067] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781235, 'name': ReconfigVM_Task, 'duration_secs': 0.784498} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.204406] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.204980] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f0f1ae1-9402-4a1f-adea-8a271ad4501d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.211883] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1193.211883] env[68244]: value = "task-2781238" [ 1193.211883] env[68244]: _type = "Task" [ 1193.211883] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.219809] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781238, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.281206] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781236, 'name': PowerOffVM_Task, 'duration_secs': 0.582913} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.281481] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.281680] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.281990] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f06fe3f-ebcf-4b37-bad2-d8e43208bf1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.348493] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.348791] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.348940] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Deleting the datastore file [datastore2] 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.349240] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79ff8d24-1535-481c-bee9-4788c90d87dc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.355739] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for the task: (returnval){ [ 1193.355739] env[68244]: value = "task-2781240" [ 1193.355739] env[68244]: _type = "Task" [ 1193.355739] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.363797] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781240, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.416045] env[68244]: DEBUG nova.scheduler.client.report [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.424283] env[68244]: DEBUG oslo_concurrency.lockutils [req-565fb9e1-f645-40a8-8fcf-74e09d9ad2ae req-3d768498-7c73-46a9-8b38-9924d147f4e2 service nova] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.581746] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781237, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.584829] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.722259] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781238, 'name': Rename_Task, 'duration_secs': 0.207828} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.722550] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.722802] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-636bf077-e53f-4dfb-8c63-4e0f24fca16e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.729600] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1193.729600] env[68244]: value = "task-2781241" [ 1193.729600] env[68244]: _type = "Task" [ 1193.729600] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.737322] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.869493] env[68244]: DEBUG oslo_vmware.api [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Task: {'id': task-2781240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.286051} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.869493] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.869493] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1193.869493] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1193.869949] env[68244]: INFO nova.compute.manager [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1193.869949] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.870131] env[68244]: DEBUG nova.compute.manager [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1193.870131] env[68244]: DEBUG nova.network.neutron [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1193.921225] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.753s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.925428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.341s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.925701] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.925909] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1193.926974] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaeb2eb2-3ecf-4dbd-a822-816b3f4d03a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.935881] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5293b7f-c546-476c-a968-9f7718027fb7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.950646] env[68244]: INFO nova.scheduler.client.report [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocations for instance b036365a-87d7-44ea-b439-80f1fe0c5f61 [ 1193.952171] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610a1096-47b8-47db-904e-d0bb991cd2cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.961781] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0962eb98-5a88-461b-9dec-2f8989fe3c63 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.991064] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180281MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1193.991304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.991421] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.081929] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781237, 'name': ReconfigVM_Task, 'duration_secs': 0.582989} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.082240] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd/volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.086960] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fa7e4c4-334a-4e56-8101-f1c6769c2b23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.102313] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1194.102313] env[68244]: value = "task-2781242" [ 1194.102313] env[68244]: _type = "Task" [ 1194.102313] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.111531] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781242, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.176270] env[68244]: DEBUG nova.compute.manager [req-adb62737-832a-4c90-af2c-dfe47e8e2c79 req-15d6ad20-8732-4c5f-b56e-b56f526f3136 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Received event network-vif-deleted-8419e732-58f6-46b8-8230-9288039f5ac7 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1194.176711] env[68244]: INFO nova.compute.manager [req-adb62737-832a-4c90-af2c-dfe47e8e2c79 req-15d6ad20-8732-4c5f-b56e-b56f526f3136 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Neutron deleted interface 8419e732-58f6-46b8-8230-9288039f5ac7; detaching it from the instance and deleting it from the info cache [ 1194.176711] env[68244]: DEBUG nova.network.neutron [req-adb62737-832a-4c90-af2c-dfe47e8e2c79 req-15d6ad20-8732-4c5f-b56e-b56f526f3136 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.183372] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1194.211654] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1194.211936] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.212188] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1194.212445] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.212641] env[68244]: 
DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1194.213207] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1194.213207] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1194.213348] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1194.213647] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1194.213904] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1194.214148] env[68244]: DEBUG nova.virt.hardware [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1194.215345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccb02e4-e974-4f95-86c6-c1440f905fc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.223714] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b173e7-97a6-4d0f-bc06-2ead91ee569c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.245838] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781241, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.371498] env[68244]: DEBUG nova.compute.manager [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Received event network-vif-plugged-45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1194.371790] env[68244]: DEBUG oslo_concurrency.lockutils [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.372104] env[68244]: DEBUG oslo_concurrency.lockutils [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.372341] env[68244]: DEBUG oslo_concurrency.lockutils [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.372544] env[68244]: DEBUG nova.compute.manager [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] No waiting events found dispatching network-vif-plugged-45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1194.372748] env[68244]: WARNING nova.compute.manager [req-210cb7d2-5709-4d7c-a8ca-bd88bfd81b50 req-002860ee-96d0-49d3-b3c0-1b7849ce5dac service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Received unexpected event network-vif-plugged-45e09a65-5a50-4359-9154-fe0bfe7f221b for instance with vm_state building and task_state spawning. [ 1194.461018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-21e418d4-a0d5-4b2f-9d73-99d4e06bf1ea tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "b036365a-87d7-44ea-b439-80f1fe0c5f61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.227s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.464410] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Successfully updated port: 45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.612306] env[68244]: DEBUG oslo_vmware.api [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781242, 'name': ReconfigVM_Task, 'duration_secs': 0.166136} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.612624] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559164', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'name': 'volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dfe017bb-d860-4da6-abe5-7e8d7a7dd05a', 'attached_at': '', 'detached_at': '', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'serial': '29f1c6a5-12c0-422d-9d39-cae96c725dbd'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1194.623764] env[68244]: DEBUG nova.network.neutron [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.679681] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89a0d8b4-9ff2-4135-b8f4-c30649af1d52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.688753] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da782e8-8ab7-4cf6-9d37-92166f3d4576 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.723150] env[68244]: DEBUG nova.compute.manager [req-adb62737-832a-4c90-af2c-dfe47e8e2c79 req-15d6ad20-8732-4c5f-b56e-b56f526f3136 service nova] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Detach interface failed, port_id=8419e732-58f6-46b8-8230-9288039f5ac7, reason: Instance 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1194.747644] env[68244]: DEBUG oslo_vmware.api [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781241, 'name': PowerOnVM_Task, 'duration_secs': 0.757807} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.747905] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1194.748188] env[68244]: INFO nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Took 9.46 seconds to spawn the instance on the hypervisor. 
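The entries above follow the oslo_vmware task pattern that recurs throughout this log: a vCenter method (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ...) is invoked, wait_for_task polls the returned task object, and the final poll reports a duration, e.g. "Task: {'id': task-2781241, 'name': PowerOnVM_Task, 'duration_secs': 0.757807} completed successfully.". Below is a minimal, hypothetical Python sketch (plain stdlib only, not part of Nova or oslo.vmware) for pulling those completion records out of log text in this format; the regex targets only the completion message shape quoted above.

import re

# Matches completion entries as they appear in this log, e.g.
# "Task: {'id': task-2781241, 'name': PowerOnVM_Task, 'duration_secs': 0.757807} completed successfully."
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<id>task-\d+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully\."
)

def task_durations(log_lines):
    """Yield (task_id, task_name, duration_secs) for each completed vCenter task."""
    for line in log_lines:
        match = TASK_DONE.search(line)
        if match:
            yield match.group("id"), match.group("name"), float(match.group("secs"))

# Hypothetical usage against two entries quoted from this section:
sample = [
    "Task: {'id': task-2781238, 'name': Rename_Task, 'duration_secs': 0.207828} completed successfully.",
    "Task: {'id': task-2781241, 'name': PowerOnVM_Task, 'duration_secs': 0.757807} completed successfully.",
]
for task_id, name, secs in task_durations(sample):
    print(f"{task_id} {name} took {secs:.3f}s")
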
[ 1194.748382] env[68244]: DEBUG nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.749192] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c16951e-e2cc-407f-8f86-9bef927c1ca4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.967022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.967022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.967163] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.021124] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021774] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021774] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021774] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021774] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021774] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.021949] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 375c4371-3537-4a94-987e-0f6f72a690b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.022051] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.022173] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f1143201-5ee1-45be-b2b1-4314a26aa10a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.022289] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a9820dc4-f52e-453c-9acf-a6a0c9a23580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.022501] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1195.022656] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1195.127680] env[68244]: INFO nova.compute.manager [-] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Took 1.26 seconds to deallocate network for instance. 
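The update_available_resource audit above lists ten instances actively managed on this host, each holding {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} in placement, and the final resource view then reports used_ram=2432MB, used_disk=10GB, used_vcpus=10. A minimal sketch (mirroring only the numbers visible in these entries, not the ResourceTracker implementation) showing that those totals are consistent with the 512 MB reserved memory from the inventory record plus the per-instance allocations:

# Figures taken from the audit and inventory entries in this section.
instances = 10                                   # instances listed by the audit above
per_instance = {"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}
reserved_ram_mb = 512                            # MEMORY_MB 'reserved' in the inventory data

used_ram_mb = reserved_ram_mb + instances * per_instance["MEMORY_MB"]
used_disk_gb = instances * per_instance["DISK_GB"]
used_vcpus = instances * per_instance["VCPU"]

# Matches the reported view: used_ram=2432MB used_disk=10GB used_vcpus=10
assert (used_ram_mb, used_disk_gb, used_vcpus) == (2432, 10, 10)
print(f"used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB used_vcpus={used_vcpus}")
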
[ 1195.169799] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192d41dc-6eb2-40b3-8c29-0cac4c826b50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.180439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aca54f2-8bb7-4f0f-95ee-41a3099152ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.212549] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c9b35a-8b77-4449-ab3c-7d4d63afd794 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.220613] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f818033d-794a-4b46-b7c8-1c47100f4b6e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.235523] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.270531] env[68244]: INFO nova.compute.manager [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Took 18.57 seconds to build instance. [ 1195.501275] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1195.632009] env[68244]: DEBUG nova.network.neutron [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updating instance_info_cache with network_info: [{"id": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "address": "fa:16:3e:4f:35:02", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45e09a65-5a", "ovs_interfaceid": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.633991] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.664228] env[68244]: DEBUG nova.objects.instance [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.738783] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.772968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7859206d-963a-45b5-83fa-9318ceb6face tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.084s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.792147] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.135800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.136974] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Instance network_info: |[{"id": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "address": "fa:16:3e:4f:35:02", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45e09a65-5a", "ovs_interfaceid": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1196.137475] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:35:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45e09a65-5a50-4359-9154-fe0bfe7f221b', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.145934] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1196.146180] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1196.146410] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20f68192-e6f4-4ed1-bbc8-fd759856b743 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.166657] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.166657] env[68244]: value = "task-2781243" [ 1196.166657] env[68244]: _type = "Task" [ 1196.166657] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.171674] env[68244]: DEBUG oslo_concurrency.lockutils [None req-040d8497-ed2c-49ea-b965-894cad07b73a tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.773s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.172674] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.381s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.172846] env[68244]: DEBUG nova.compute.manager [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.174139] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684ecd90-2044-4380-ac5a-5597c96340d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.179960] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781243, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.183684] env[68244]: DEBUG nova.compute.manager [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1196.184270] env[68244]: DEBUG nova.objects.instance [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.203024] env[68244]: DEBUG nova.compute.manager [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1196.203264] env[68244]: DEBUG nova.compute.manager [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing instance network info cache due to event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1196.203430] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.203578] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.204194] env[68244]: DEBUG nova.network.neutron [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.244269] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1196.244269] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.253s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.244845] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.611s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.245047] env[68244]: DEBUG nova.objects.instance [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lazy-loading 'resources' on Instance uuid 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.246010] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.246164] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances with incomplete migration {{(pid=68244) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1196.362745] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.362979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.397307] env[68244]: DEBUG nova.compute.manager [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Received event network-changed-45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1196.397553] env[68244]: DEBUG nova.compute.manager [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Refreshing instance network info cache due to event network-changed-45e09a65-5a50-4359-9154-fe0bfe7f221b. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1196.397773] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] Acquiring lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.397919] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] Acquired lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.398098] env[68244]: DEBUG nova.network.neutron [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Refreshing network info cache for port 45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.676567] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781243, 'name': CreateVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.750190] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.865714] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1196.893282] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce8aa49-99c7-40cb-930d-281b8029e219 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.905665] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca1f584-3abb-430d-8e53-938d808f4e58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.937580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b479b00-6b4d-4f03-b908-cedea918f4eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.940862] env[68244]: DEBUG nova.network.neutron [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updated VIF entry in instance network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.941786] env[68244]: DEBUG nova.network.neutron [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.948092] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4a1091-5858-44be-97ac-2beaceccac70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.961658] env[68244]: DEBUG nova.compute.provider_tree [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.176970] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781243, 'name': CreateVM_Task, 'duration_secs': 0.553215} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.177154] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1197.177844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.178023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.178353] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1197.178608] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fdba715-f39d-4399-ac08-e1d60a5cb977 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.183713] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1197.183713] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262112b-ac28-1d6b-167f-f6118d9f3494" [ 1197.183713] env[68244]: _type = "Task" [ 1197.183713] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.191018] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262112b-ac28-1d6b-167f-f6118d9f3494, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.194660] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.194902] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-275076c9-a0f1-46a7-87c4-e4c58252aced {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.200819] env[68244]: DEBUG oslo_vmware.api [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1197.200819] env[68244]: value = "task-2781244" [ 1197.200819] env[68244]: _type = "Task" [ 1197.200819] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.209498] env[68244]: DEBUG oslo_vmware.api [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.379471] env[68244]: DEBUG nova.network.neutron [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updated VIF entry in instance network info cache for port 45e09a65-5a50-4359-9154-fe0bfe7f221b. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.379961] env[68244]: DEBUG nova.network.neutron [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updating instance_info_cache with network_info: [{"id": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "address": "fa:16:3e:4f:35:02", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45e09a65-5a", "ovs_interfaceid": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.386979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.448106] env[68244]: DEBUG oslo_concurrency.lockutils [req-8b8de83f-ce6a-4003-8292-8ab9fc137fcf req-b73278e8-75bf-4207-b89f-fb9b9f484bb1 service nova] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.467264] env[68244]: DEBUG nova.scheduler.client.report [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.693991] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262112b-ac28-1d6b-167f-f6118d9f3494, 'name': SearchDatastore_Task, 'duration_secs': 0.012384} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.694309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.694539] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.694769] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.694917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.695111] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1197.695374] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e7ca9ff-1400-4110-be40-32f86a4b668b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.705395] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1197.705583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1197.706573] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff76485-d038-4af0-9a96-51dd7e762c2b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.711435] env[68244]: DEBUG oslo_vmware.api [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781244, 'name': PowerOffVM_Task, 'duration_secs': 0.183379} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.712017] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.712205] env[68244]: DEBUG nova.compute.manager [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.712912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ad9c0c-631f-4ae2-9753-ac93c03ffe38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.716075] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1197.716075] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5274808f-162a-f0bc-5131-4b916bf26cce" [ 1197.716075] env[68244]: _type = "Task" [ 1197.716075] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.727780] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5274808f-162a-f0bc-5131-4b916bf26cce, 'name': SearchDatastore_Task, 'duration_secs': 0.010703} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.729047] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c51ca2b-97fa-4d71-8386-068c9092fc56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.734608] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1197.734608] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527638b5-8ed2-83c1-017b-07e96f0bd3d3" [ 1197.734608] env[68244]: _type = "Task" [ 1197.734608] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.742702] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527638b5-8ed2-83c1-017b-07e96f0bd3d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.882988] env[68244]: DEBUG oslo_concurrency.lockutils [req-bb5a3e98-8e0d-446c-9fbe-201a4dc8c071 req-89e73ddb-dde0-457a-9be8-a0fcbe238f2d service nova] Releasing lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.972363] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.974751] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.588s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.976431] env[68244]: INFO nova.compute.claims [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1197.991063] env[68244]: INFO nova.scheduler.client.report [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Deleted allocations for instance 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c [ 1198.226111] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c5ff7fcf-dbbd-4ad1-bd3b-36fd74985871 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.244959] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527638b5-8ed2-83c1-017b-07e96f0bd3d3, 'name': SearchDatastore_Task, 'duration_secs': 0.00857} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.245268] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.245517] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a9820dc4-f52e-453c-9acf-a6a0c9a23580/a9820dc4-f52e-453c-9acf-a6a0c9a23580.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1198.245763] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-665c3c1b-5bf8-4eeb-b409-f0f5a294411e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.252620] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1198.252620] env[68244]: value = "task-2781245" [ 1198.252620] env[68244]: _type = "Task" [ 1198.252620] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.260365] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.500035] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bd1a7683-8c78-4b52-b733-7dce2b010fee tempest-ServersNegativeTestMultiTenantJSON-552422335 tempest-ServersNegativeTestMultiTenantJSON-552422335-project-member] Lock "8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.754s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.762830] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781245, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.132122] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbf1130-af24-473a-a7ba-8213c88a6f24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.139844] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad51490-ad75-4803-90c6-a294ef5a0943 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.170525] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb6a7b4-5fc9-4eb4-8337-5c84be912a62 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.178025] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2433c9-fa5a-4332-bb3a-862af5e27be4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.191729] env[68244]: DEBUG nova.compute.provider_tree [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.248885] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249169] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249282] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249475] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249690] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249853] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.249995] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1199.262424] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669856} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.262658] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] a9820dc4-f52e-453c-9acf-a6a0c9a23580/a9820dc4-f52e-453c-9acf-a6a0c9a23580.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1199.262876] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1199.263129] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d023c1ba-c55f-493c-9170-f5002d09aa6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.269294] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1199.269294] env[68244]: value = "task-2781246" [ 1199.269294] env[68244]: _type = "Task" [ 1199.269294] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.277830] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781246, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.390476] env[68244]: DEBUG nova.objects.instance [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.697149] env[68244]: DEBUG nova.scheduler.client.report [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1199.779307] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061983} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.779571] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1199.780339] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9749dc85-057d-42af-aaf6-c49841faf8da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.803991] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] a9820dc4-f52e-453c-9acf-a6a0c9a23580/a9820dc4-f52e-453c-9acf-a6a0c9a23580.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.804267] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad5e683a-81cf-4d16-94a2-445448e654cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.823194] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1199.823194] env[68244]: value = "task-2781247" [ 1199.823194] env[68244]: _type = "Task" [ 1199.823194] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.830647] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781247, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.895965] env[68244]: DEBUG oslo_concurrency.lockutils [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.896168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.896346] env[68244]: DEBUG nova.network.neutron [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.896525] env[68244]: DEBUG nova.objects.instance [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'info_cache' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.202990] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.228s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.203453] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1200.334269] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781247, 'name': ReconfigVM_Task, 'duration_secs': 0.347582} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.334269] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfigured VM instance instance-0000006a to attach disk [datastore2] a9820dc4-f52e-453c-9acf-a6a0c9a23580/a9820dc4-f52e-453c-9acf-a6a0c9a23580.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.335888] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f855084-4c13-4353-a909-7d0bb2411f92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.342839] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1200.342839] env[68244]: value = "task-2781248" [ 1200.342839] env[68244]: _type = "Task" [ 1200.342839] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.351342] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781248, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.399859] env[68244]: DEBUG nova.objects.base [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1200.708903] env[68244]: DEBUG nova.compute.utils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1200.710783] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1200.713176] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1200.748556] env[68244]: DEBUG nova.policy [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3151a146805a456da750a47964f86f2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a151f53070d94d08bf7e85617a6f5190', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1200.853728] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781248, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.016597] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Successfully created port: 7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1201.078640] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.078640] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1201.216136] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1201.249898] env[68244]: DEBUG nova.network.neutron [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.355630] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781248, 'name': Rename_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.595051] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] There are 57 instances to clean {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1201.595248] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8b659ee6-4d6d-4179-a7f2-3e0bd393ee4c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1201.753296] env[68244]: DEBUG oslo_concurrency.lockutils [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.854244] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781248, 'name': Rename_Task, 'duration_secs': 1.173288} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.854553] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.854803] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cecf279b-fa99-4cd0-bc8a-ab4c7d8c2ee1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.860899] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1201.860899] env[68244]: value = "task-2781249" [ 1201.860899] env[68244]: _type = "Task" [ 1201.860899] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.868213] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.099175] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b036365a-87d7-44ea-b439-80f1fe0c5f61] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1202.229209] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1202.274334] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.274605] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.274907] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.274984] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.275210] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.275603] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.275848] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.276015] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.276302] env[68244]: DEBUG 
nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.276354] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.276609] env[68244]: DEBUG nova.virt.hardware [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.277527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1205b1e-01f9-4828-8851-86751e073f99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.289253] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ca0c21-983d-4731-bbad-4f9b89c98776 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.371981] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781249, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.605409] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ae8211ae-82bb-4a69-aa27-e81de2a06abe] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1202.647268] env[68244]: DEBUG nova.compute.manager [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Received event network-vif-plugged-7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1202.647496] env[68244]: DEBUG oslo_concurrency.lockutils [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.647707] env[68244]: DEBUG oslo_concurrency.lockutils [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.647873] env[68244]: DEBUG oslo_concurrency.lockutils [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.648127] env[68244]: DEBUG nova.compute.manager [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] No waiting events found dispatching network-vif-plugged-7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1202.648361] env[68244]: WARNING nova.compute.manager [req-206d4f77-aec7-4fc1-8bef-c5f2b91c2a97 req-7c82f63a-5cdf-46ed-b1d3-310ee3ac021a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Received unexpected event network-vif-plugged-7f74db30-2640-4e0b-9332-eecb85a1b8bc for instance with vm_state building and task_state spawning. 
[ 1202.738504] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Successfully updated port: 7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.764030] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1202.764030] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39289560-7be3-4224-8b9d-378274e2389c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.772009] env[68244]: DEBUG oslo_vmware.api [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1202.772009] env[68244]: value = "task-2781250" [ 1202.772009] env[68244]: _type = "Task" [ 1202.772009] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.779495] env[68244]: DEBUG oslo_vmware.api [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.871541] env[68244]: DEBUG oslo_vmware.api [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781249, 'name': PowerOnVM_Task, 'duration_secs': 0.512155} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.871793] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1202.872111] env[68244]: INFO nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Took 8.69 seconds to spawn the instance on the hypervisor. 
[ 1202.872212] env[68244]: DEBUG nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.872973] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0de0cec-5308-4e6d-bc87-9141c58cddf9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.107644] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8d0fa6cd-c14f-49ab-9595-396a10b4639a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1203.240409] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.240650] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.240833] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.285972] env[68244]: DEBUG oslo_vmware.api [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781250, 'name': PowerOnVM_Task, 'duration_secs': 0.396598} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.286287] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.286481] env[68244]: DEBUG nova.compute.manager [None req-56457055-7d40-47b0-aa3f-802e948b1dd5 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1203.287263] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16fb300d-3470-4038-81fd-14823e0cabaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.392030] env[68244]: INFO nova.compute.manager [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Took 13.54 seconds to build instance. [ 1203.610553] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d4fd9092-9081-4be0-b33d-c175be24f12e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1203.772761] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1203.893482] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e47865f9-2077-49af-ac7f-146d2c7774d6 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.054s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.934362] env[68244]: DEBUG nova.network.neutron [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating instance_info_cache with network_info: [{"id": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "address": "fa:16:3e:22:96:d3", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f74db30-26", "ovs_interfaceid": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.114085] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 10e67250-5ddc-430d-aac7-4e6bae0778e5] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1204.160643] env[68244]: DEBUG nova.compute.manager [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Received event network-changed-45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1204.160836] env[68244]: DEBUG nova.compute.manager [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Refreshing instance network info cache due to event network-changed-45e09a65-5a50-4359-9154-fe0bfe7f221b. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1204.161195] env[68244]: DEBUG oslo_concurrency.lockutils [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] Acquiring lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.161424] env[68244]: DEBUG oslo_concurrency.lockutils [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] Acquired lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.161608] env[68244]: DEBUG nova.network.neutron [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Refreshing network info cache for port 45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.436585] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.436921] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance network_info: |[{"id": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "address": "fa:16:3e:22:96:d3", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f74db30-26", "ovs_interfaceid": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1204.437382] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:96:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7f74db30-2640-4e0b-9332-eecb85a1b8bc', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.444933] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.445161] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1204.445399] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d7abb10-87a9-49ad-a372-99a32d97d1b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.465198] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.465198] env[68244]: value = "task-2781251" [ 1204.465198] env[68244]: _type = "Task" [ 1204.465198] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.472560] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781251, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.618571] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 91422c89-601c-4e5f-b5b0-fa2639031d3e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1204.676441] env[68244]: DEBUG nova.compute.manager [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Received event network-changed-7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1204.676441] env[68244]: DEBUG nova.compute.manager [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Refreshing instance network info cache due to event network-changed-7f74db30-2640-4e0b-9332-eecb85a1b8bc. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1204.676629] env[68244]: DEBUG oslo_concurrency.lockutils [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] Acquiring lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.676779] env[68244]: DEBUG oslo_concurrency.lockutils [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] Acquired lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.676945] env[68244]: DEBUG nova.network.neutron [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Refreshing network info cache for port 7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.878008] env[68244]: DEBUG nova.network.neutron [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updated VIF entry in instance network info cache for port 45e09a65-5a50-4359-9154-fe0bfe7f221b. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.878454] env[68244]: DEBUG nova.network.neutron [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updating instance_info_cache with network_info: [{"id": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "address": "fa:16:3e:4f:35:02", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45e09a65-5a", "ovs_interfaceid": "45e09a65-5a50-4359-9154-fe0bfe7f221b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.976806] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781251, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.122420] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 4eb691f4-567e-412c-ba04-792ee9a21135] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1205.371889] env[68244]: DEBUG nova.network.neutron [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updated VIF entry in instance network info cache for port 7f74db30-2640-4e0b-9332-eecb85a1b8bc. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.372360] env[68244]: DEBUG nova.network.neutron [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating instance_info_cache with network_info: [{"id": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "address": "fa:16:3e:22:96:d3", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f74db30-26", "ovs_interfaceid": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.382218] env[68244]: DEBUG oslo_concurrency.lockutils [req-f92bd1d4-874f-4ece-a120-7273bb2fc7a7 req-60a706b0-47a6-4e5f-8afd-7452c21be966 service nova] Releasing lock "refresh_cache-a9820dc4-f52e-453c-9acf-a6a0c9a23580" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.477499] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781251, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.626126] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 5c4bb8d0-8135-4272-83c2-ef923ac52d4a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1205.875636] env[68244]: DEBUG oslo_concurrency.lockutils [req-01fbe370-d836-4959-899f-6c4186b1ea24 req-baf8e6dc-1027-4396-b24f-9bf082bfcc84 service nova] Releasing lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.977262] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781251, 'name': CreateVM_Task, 'duration_secs': 1.350139} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.977477] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1205.978189] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.978361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.978693] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1205.978970] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b8007bc-86ba-4e33-97a3-8564083a7906 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.983194] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1205.983194] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52660003-36e4-9620-d8d1-38f556a82770" [ 1205.983194] env[68244]: _type = "Task" [ 1205.983194] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.990651] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52660003-36e4-9620-d8d1-38f556a82770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.129925] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f9f6c504-f140-4c90-994b-d3ec2d148796] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1206.494529] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52660003-36e4-9620-d8d1-38f556a82770, 'name': SearchDatastore_Task, 'duration_secs': 0.041978} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.494915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.495219] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.495504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.495691] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.495882] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.496179] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae248b1f-c1b5-4bc4-b1e9-afcc863fb2c9 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.507795] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.507971] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.508709] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83050721-ea5c-4815-9a01-b7dd79b6fdec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.513886] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1206.513886] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262bf0e-ded9-d093-e253-720904455e3d" [ 1206.513886] env[68244]: _type = "Task" [ 1206.513886] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.521055] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262bf0e-ded9-d093-e253-720904455e3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.633091] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 788e77e1-a356-4342-9ff3-5ad13868fd77] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1207.024738] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5262bf0e-ded9-d093-e253-720904455e3d, 'name': SearchDatastore_Task, 'duration_secs': 0.050329} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.025552] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3458bc1d-b501-4dc8-914d-70ccf9bd3512 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.030501] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1207.030501] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b95b49-ab6b-6450-dd32-4f62ac35f7ec" [ 1207.030501] env[68244]: _type = "Task" [ 1207.030501] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.037504] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b95b49-ab6b-6450-dd32-4f62ac35f7ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.135942] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: fc75039c-f2d0-4d4b-9a82-b605b6ba63d5] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1207.541639] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b95b49-ab6b-6450-dd32-4f62ac35f7ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009249} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.541979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.542160] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1207.542434] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c0660fd-ac35-4bec-bca5-7939ef24bc85 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.549524] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1207.549524] env[68244]: value = "task-2781252" [ 1207.549524] env[68244]: _type = "Task" [ 1207.549524] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.557247] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781252, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.640050] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: a50d505f-92f2-4759-ab8f-1bf4c9708b1a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1207.941979] env[68244]: INFO nova.compute.manager [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Rebuilding instance [ 1208.059886] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781252, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.107039] env[68244]: DEBUG nova.compute.manager [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.107875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50fe3a8-f4d7-4be5-be68-0da61762cfe9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.142770] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 477da9d1-8550-48be-b243-519b4f0ca443] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1208.560984] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781252, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.646020] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: df935885-c313-473d-aa3a-ba81aa999554] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1209.061965] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781252, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.476914} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.062618] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1209.063360] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1209.063360] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de005488-0002-45c4-a5c6-1f3e1d5453c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.069597] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1209.069597] env[68244]: value = "task-2781253" [ 1209.069597] env[68244]: _type = "Task" [ 1209.069597] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.077748] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781253, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.120553] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1209.120875] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-087e3027-4c4f-44d8-85e5-020fe283bb01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.126937] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1209.126937] env[68244]: value = "task-2781254" [ 1209.126937] env[68244]: _type = "Task" [ 1209.126937] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.134885] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.148532] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: cd2c4986-2092-4bc5-94c6-222f036c5e83] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1209.579559] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055538} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.579968] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1209.580552] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9055875a-7d9b-48fc-8681-65e238844ce0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.602881] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1209.603139] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f5f5e38-3e6c-4f60-b7a5-fc8962b449b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.621814] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1209.621814] env[68244]: value = "task-2781255" [ 1209.621814] env[68244]: _type = "Task" [ 1209.621814] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.629721] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.636477] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781254, 'name': PowerOffVM_Task, 'duration_secs': 0.218246} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.636714] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1209.636937] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1209.637639] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc2e6a0-cfc4-448b-af24-d6c289dc75a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.643715] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1209.643970] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-388927d3-a296-41f1-b762-914773fab3d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.651921] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 1ba4f3f5-726e-482f-a821-d2ee1bbd4c33] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1209.706744] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.707031] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.707283] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleting the datastore file [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.707593] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc6c91b6-9b8c-4730-9ee1-462fbaffcd0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.714746] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] 
Waiting for the task: (returnval){ [ 1209.714746] env[68244]: value = "task-2781257" [ 1209.714746] env[68244]: _type = "Task" [ 1209.714746] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.722868] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781257, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.131872] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781255, 'name': ReconfigVM_Task, 'duration_secs': 0.297919} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.132161] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1210.132790] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38b9655e-3fe4-440f-8fd0-99014dac9d31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.139696] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1210.139696] env[68244]: value = "task-2781258" [ 1210.139696] env[68244]: _type = "Task" [ 1210.139696] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.147147] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781258, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.155625] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: fc1cb1cf-3f8b-4517-a8d7-a3be2335b24f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1210.225585] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157249} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.225881] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.226101] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.226652] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.650177] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781258, 'name': Rename_Task, 'duration_secs': 0.168861} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.650499] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1210.650692] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8b53bbb-1b75-4552-85b8-ec0df84f09c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.657032] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1210.657032] env[68244]: value = "task-2781259" [ 1210.657032] env[68244]: _type = "Task" [ 1210.657032] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.659935] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 246e079b-9fc1-442f-9c20-4e0c05e152e3] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1210.667038] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781259, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.163674] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 0c336f72-1cb9-468a-bf59-b0de937e1e94] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1211.169244] env[68244]: DEBUG oslo_vmware.api [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781259, 'name': PowerOnVM_Task, 'duration_secs': 0.478616} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.169723] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1211.169928] env[68244]: INFO nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1211.170179] env[68244]: DEBUG nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1211.170931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f20a3c3-2235-4238-a426-931ebafa1577 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.262628] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1211.262886] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.263055] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 
tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1211.263241] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.263392] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1211.263560] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1211.263770] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1211.263924] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1211.264111] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1211.264263] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1211.264435] env[68244]: DEBUG nova.virt.hardware [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1211.265312] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f562ef-ecea-44ce-8c8a-965eb2dbf1f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.273568] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ef3c6f-56f5-482b-8a78-a0a676c82eec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.292256] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:e4:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1211.300099] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.300413] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1211.300664] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1ec1417-a736-43f7-92ec-2c5112a88e34 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.319751] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1211.319751] env[68244]: value = "task-2781260" [ 1211.319751] env[68244]: _type = "Task" [ 1211.319751] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.326802] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781260, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.655622] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.655980] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.656062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.656265] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.656434] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.658734] env[68244]: INFO nova.compute.manager [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Terminating instance [ 1211.666973] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f2e57bf9-05ee-49d8-846d-c3bf5920ae96] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1211.691402] env[68244]: INFO nova.compute.manager [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Took 14.32 seconds to build instance. [ 1211.830238] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781260, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.162655] env[68244]: DEBUG nova.compute.manager [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1212.162830] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.163728] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00509b28-b502-439b-be38-901f285b6bc9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.169393] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b84c2c08-651a-407d-89dd-177bc5d90313] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1212.173185] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.173664] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92e796cc-2b6a-4aab-a1f6-a97053baddea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.179830] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1212.179830] env[68244]: value = "task-2781261" [ 1212.179830] env[68244]: _type = "Task" [ 1212.179830] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.188193] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.193771] env[68244]: DEBUG oslo_concurrency.lockutils [None req-801661c2-fad4-4151-acfc-5c4985a4baa3 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.831s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.333207] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781260, 'name': CreateVM_Task, 'duration_secs': 0.728094} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.333411] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1212.334071] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.334241] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.334573] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1212.334830] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03a11767-1148-4f9e-bee9-e5cffc196c01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.339224] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1212.339224] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5221b351-1c93-a7d4-8451-3e650210572b" [ 1212.339224] env[68244]: _type = "Task" [ 1212.339224] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.349766] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5221b351-1c93-a7d4-8451-3e650210572b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.674531] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 9658b4e0-f4f9-4628-b700-19d94800961c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1212.692269] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781261, 'name': PowerOffVM_Task, 'duration_secs': 0.19971} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.692269] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1212.692269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1212.692269] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e11408dd-d59a-4043-b27c-d3c4654d6106 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.729161] env[68244]: DEBUG nova.compute.manager [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Received event network-changed-7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1212.729161] env[68244]: DEBUG nova.compute.manager [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Refreshing instance network info cache due to event network-changed-7f74db30-2640-4e0b-9332-eecb85a1b8bc. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1212.729161] env[68244]: DEBUG oslo_concurrency.lockutils [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] Acquiring lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.729368] env[68244]: DEBUG oslo_concurrency.lockutils [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] Acquired lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.729459] env[68244]: DEBUG nova.network.neutron [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Refreshing network info cache for port 7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1212.759340] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1212.759560] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Deleting contents of the VM from datastore datastore2 {{(pid=68244) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1212.759739] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Deleting the datastore file [datastore2] da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.759997] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f8032d9-cf79-4978-937b-1108a912c784 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.766534] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for the task: (returnval){ [ 1212.766534] env[68244]: value = "task-2781263" [ 1212.766534] env[68244]: _type = "Task" [ 1212.766534] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.774678] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781263, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.850042] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5221b351-1c93-a7d4-8451-3e650210572b, 'name': SearchDatastore_Task, 'duration_secs': 0.016726} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.850411] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.850672] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1212.850941] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.851160] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.851379] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1212.851671] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d7f6541-c207-4306-8cee-22806f0400e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.858979] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1212.859169] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1212.859860] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0733b39-7412-4de3-9610-d7396e733bc0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.865273] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1212.865273] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520527c9-aef0-35cb-1f2b-ef92981c96c9" [ 1212.865273] env[68244]: _type = "Task" [ 1212.865273] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.872880] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520527c9-aef0-35cb-1f2b-ef92981c96c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.178066] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: aa5a373e-b34a-4f94-912b-0c7d20fc5b6c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1213.277951] env[68244]: DEBUG oslo_vmware.api [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Task: {'id': task-2781263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128342} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.278094] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1213.278285] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1213.278481] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1213.278633] env[68244]: INFO nova.compute.manager [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1213.278923] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1213.279138] env[68244]: DEBUG nova.compute.manager [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1213.279236] env[68244]: DEBUG nova.network.neutron [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1213.377972] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520527c9-aef0-35cb-1f2b-ef92981c96c9, 'name': SearchDatastore_Task, 'duration_secs': 0.008237} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.378813] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aa5733b-1a4d-49e2-8fc6-f3f3ff568e4d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.383901] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1213.383901] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522613a1-7d53-77b3-1582-489a0817cbdc" [ 1213.383901] env[68244]: _type = "Task" [ 1213.383901] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.391648] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522613a1-7d53-77b3-1582-489a0817cbdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.432901] env[68244]: DEBUG nova.network.neutron [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updated VIF entry in instance network info cache for port 7f74db30-2640-4e0b-9332-eecb85a1b8bc. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1213.433289] env[68244]: DEBUG nova.network.neutron [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating instance_info_cache with network_info: [{"id": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "address": "fa:16:3e:22:96:d3", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f74db30-26", "ovs_interfaceid": "7f74db30-2640-4e0b-9332-eecb85a1b8bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.681948] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 60c502f4-8c4b-433e-ad4f-9351048abe11] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1213.901576] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522613a1-7d53-77b3-1582-489a0817cbdc, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.901870] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.902223] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1213.902526] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b3a4d76-302a-4180-ba35-9a448ee7b29e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.910294] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1213.910294] env[68244]: value = "task-2781264" [ 1213.910294] env[68244]: _type = "Task" [ 1213.910294] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.916187] env[68244]: DEBUG nova.compute.manager [req-facdbd61-3198-47a3-a83e-e05a10566bf4 req-893313d1-9803-4e9b-be42-ad82d3103056 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Received event network-vif-deleted-ffb04675-4c65-4d62-ab42-459a01bb68b5 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1213.916385] env[68244]: INFO nova.compute.manager [req-facdbd61-3198-47a3-a83e-e05a10566bf4 req-893313d1-9803-4e9b-be42-ad82d3103056 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Neutron deleted interface ffb04675-4c65-4d62-ab42-459a01bb68b5; detaching it from the instance and deleting it from the info cache [ 1213.916542] env[68244]: DEBUG nova.network.neutron [req-facdbd61-3198-47a3-a83e-e05a10566bf4 req-893313d1-9803-4e9b-be42-ad82d3103056 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.923097] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781264, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.938911] env[68244]: DEBUG oslo_concurrency.lockutils [req-2800611e-b4a8-455f-883e-cd04c2c2ed71 req-62781845-0147-4dd8-b2c0-674eeeb9fc0a service nova] Releasing lock "refresh_cache-0597e8ed-2f24-44c7-ac92-06af34d6a4fa" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.185579] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 4fe60c1b-fbfb-4bf0-b52a-7920fa87f598] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1214.397309] env[68244]: DEBUG nova.network.neutron [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.420674] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467299} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.420924] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8db9dd76-3bc2-4f3b-b375-abcd665aaee8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.422696] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1214.422909] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1214.423268] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c44824e1-9cbd-4bba-8a85-4e2da84ef474 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.430975] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ffb7e8-c8e4-4fa6-9d8e-153e9a6acb2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.444047] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1214.444047] env[68244]: value = "task-2781265" [ 1214.444047] env[68244]: _type = "Task" [ 1214.444047] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.463950] env[68244]: DEBUG nova.compute.manager [req-facdbd61-3198-47a3-a83e-e05a10566bf4 req-893313d1-9803-4e9b-be42-ad82d3103056 service nova] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Detach interface failed, port_id=ffb04675-4c65-4d62-ab42-459a01bb68b5, reason: Instance da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1214.467062] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.689487] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b92b8f2a-a51b-4c9b-82d4-04b3f09ed8a8] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1214.901024] env[68244]: INFO nova.compute.manager [-] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Took 1.62 seconds to deallocate network for instance. [ 1214.956064] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059741} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.956422] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1214.957130] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36446df3-579a-4a51-84ec-55699ea9199e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.978924] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.979225] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70d92bbb-6501-4a8c-b0a7-76d5c54eb297 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.998759] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1214.998759] env[68244]: value = "task-2781266" [ 1214.998759] env[68244]: _type = "Task" [ 1214.998759] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.006810] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.193684] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 54b9144a-f84a-4be2-b6de-c61af436ec4e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1215.407143] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.407456] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.407685] env[68244]: DEBUG nova.objects.instance [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lazy-loading 'resources' on Instance uuid da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.508830] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781266, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.696841] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 91d45b22-7963-4615-8455-7d910a9a0fed] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1216.011741] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781266, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.036232] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f96b8e-5f60-4a6b-a353-7ab62891e0ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.043295] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7826ef45-fb95-4a5c-8f45-4f93cb106a5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.075255] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3b7d30-8e82-4f3a-8f38-623672bb5887 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.082293] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99cd454-769a-4882-97c7-6a645c6e2591 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.095241] env[68244]: DEBUG nova.compute.provider_tree [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.200075] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d51ca913-54ad-4e0d-8d4c-b2ec6d4a4c88] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1216.510678] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781266, 'name': ReconfigVM_Task, 'duration_secs': 1.187062} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.510967] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47/7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1216.511652] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14268bcc-3acd-49e3-acd9-47a9d7cb55e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.517833] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1216.517833] env[68244]: value = "task-2781267" [ 1216.517833] env[68244]: _type = "Task" [ 1216.517833] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.525297] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781267, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.598580] env[68244]: DEBUG nova.scheduler.client.report [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.704912] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ce2c5992-690a-4ab4-8dc1-86d99f8ca647] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1217.028060] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781267, 'name': Rename_Task, 'duration_secs': 0.138029} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.028060] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1217.029435] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-807bf960-3a95-457e-a240-aeafd1660282 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.034081] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1217.034081] env[68244]: value = "task-2781268" [ 1217.034081] env[68244]: _type = "Task" [ 1217.034081] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.041036] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781268, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.104402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.122775] env[68244]: INFO nova.scheduler.client.report [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Deleted allocations for instance da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab [ 1217.207925] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 4e40bd1b-13f8-49bc-aaaa-79f5eb1bfc4c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1217.543749] env[68244]: DEBUG oslo_vmware.api [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781268, 'name': PowerOnVM_Task, 'duration_secs': 0.430954} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.544032] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1217.544246] env[68244]: DEBUG nova.compute.manager [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1217.545029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba64cc03-33ca-4e93-be7f-f0a26060b572 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.631359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ccb5ea19-e396-40c7-b5cb-821ed5fd448f tempest-ServersTestFqdnHostnames-1415069434 tempest-ServersTestFqdnHostnames-1415069434-project-member] Lock "da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.975s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.711463] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: c70fb986-8396-4f11-98c4-1ed977a23bcd] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1218.062894] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.063177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.063354] env[68244]: DEBUG nova.objects.instance [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1218.214720] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 183ac01e-82b1-470e-9e8f-a8aefb4c64c3] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1218.717580] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b50ed409-296a-4b6d-81d2-f8cfc24de24e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1219.071409] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8e85bd1e-2a68-44d4-a252-43f916895341 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.222813] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ed5b8ba3-c8f0-468f-85d1-f36179bfef32] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1219.724727] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 91232cad-54b3-45af-bb54-af268de182fa] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1220.228686] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 92ce8150-982b-4669-b27a-4afd5c85da86] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1220.732471] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: c73d39d9-1fb7-4ce7-8d60-9243bd6f519f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1221.006779] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.007070] 
env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.098029] env[68244]: DEBUG oslo_concurrency.lockutils [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.098362] env[68244]: DEBUG oslo_concurrency.lockutils [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.098711] env[68244]: DEBUG nova.objects.instance [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.236052] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d74a0d56-8656-429c-a703-fca87e07798f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1221.510040] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1221.602429] env[68244]: DEBUG nova.objects.instance [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.739572] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 2487689d-7a83-49d7-be78-fbb946ebef8c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1222.032072] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.032383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.033821] env[68244]: INFO nova.compute.claims [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1222.105197] env[68244]: DEBUG nova.objects.base [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<375c4371-3537-4a94-987e-0f6f72a690b8> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1222.105445] env[68244]: DEBUG nova.network.neutron [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1222.183237] env[68244]: DEBUG oslo_concurrency.lockutils [None req-041dbc65-91ea-47f1-9ab9-0ddfdd700540 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.085s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.242398] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: cedcff81-0010-4fa6-95bf-72a4dcac5427] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1222.745506] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 
45ec526b-e9d8-4ea3-b0c8-af6da39b0158] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1223.156211] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0507a571-f115-4d00-8331-c1aea7b35ccc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.164033] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897b593f-0d63-4f32-9f17-3b6df943351a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.196653] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44945569-458c-4b45-bb4f-8b78bf82896a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.204547] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a519c8-e4f5-4f0b-96b1-e1e7151e38ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.217015] env[68244]: DEBUG nova.compute.provider_tree [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.248721] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ffa17045-fadf-47d7-9c3b-19d0d54de3fc] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1223.719507] env[68244]: DEBUG nova.scheduler.client.report [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.751664] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d46f6695-7a96-4e0b-b43a-236bcb4ec519] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1224.141413] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.141769] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 
tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.142320] env[68244]: DEBUG nova.objects.instance [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.224715] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.225261] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1224.255329] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 085b318d-e704-46f9-89a6-679b8aa49f85] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1224.707722] env[68244]: DEBUG nova.objects.instance [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.730955] env[68244]: DEBUG nova.compute.utils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1224.732179] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1224.758295] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b0b79f25-f97d-4d59-ae80-2f8c09201073] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1225.210469] env[68244]: DEBUG nova.objects.base [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<375c4371-3537-4a94-987e-0f6f72a690b8> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1225.210656] env[68244]: DEBUG nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1225.233777] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1225.260174] env[68244]: DEBUG nova.policy [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1225.261894] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 828865d7-d06a-4683-9149-987e6d9efbd9] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1225.529857] env[68244]: DEBUG nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Successfully created port: 81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1225.765132] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f579141b-1fac-4541-99c3-07644a0a358c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1226.244376] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1226.268543] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: cf869add-e829-4fbe-a0bf-7a4fbc0bcb4b] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1226.273224] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1226.273463] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1226.273625] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1226.273807] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1226.273950] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1226.274123] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1226.274346] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1226.274506] env[68244]: DEBUG nova.virt.hardware [None 
req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1226.274672] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1226.274834] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1226.275015] env[68244]: DEBUG nova.virt.hardware [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1226.275952] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60f225d-f902-43be-ac3d-927b772131ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.286268] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051536a2-2b51-4578-b580-7d921f312d73 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.301046] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1226.306882] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Creating folder: Project (1f14ef886f844846bca057c7f994796d). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1226.307855] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab9f15ba-a86e-41f2-80b3-05571d9f91c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.319534] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Created folder: Project (1f14ef886f844846bca057c7f994796d) in parent group-v558876. [ 1226.319739] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Creating folder: Instances. Parent ref: group-v559168. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1226.319942] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c5e4321-8371-4753-841e-7938ea4cb2d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.331015] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Created folder: Instances in parent group-v559168. [ 1226.331260] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1226.331464] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1226.331678] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-927627cb-8742-4fe8-ae3b-af7e1ff1c7ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.348925] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1226.348925] env[68244]: value = "task-2781271" [ 1226.348925] env[68244]: _type = "Task" [ 1226.348925] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.356984] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781271, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.780709] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 7778c027-d4af-436c-a545-aa513c0b1127] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1226.859150] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781271, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.945737] env[68244]: DEBUG nova.compute.manager [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-plugged-81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1226.945892] env[68244]: DEBUG oslo_concurrency.lockutils [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.946111] env[68244]: DEBUG oslo_concurrency.lockutils [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.946287] env[68244]: DEBUG oslo_concurrency.lockutils [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.946459] env[68244]: DEBUG nova.compute.manager [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] No waiting events found dispatching network-vif-plugged-81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1226.946627] env[68244]: WARNING nova.compute.manager [req-c4988a81-fb89-443a-b275-88c49bd78312 req-e77f5b47-fe97-4fe2-aa41-98e3d8e87431 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received unexpected event network-vif-plugged-81a1055a-a5f1-488f-aacc-d6c908d0af27 for instance with vm_state active and task_state None. [ 1226.979009] env[68244]: DEBUG nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Successfully updated port: 81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1227.283908] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: e2099d6d-5ab7-4a3e-8034-a3b4fc422749] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1227.359743] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781271, 'name': CreateVM_Task, 'duration_secs': 0.525844} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.359910] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1227.360354] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.360520] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.360837] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1227.361303] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a68834-5e8c-42b3-b6e0-2adae3d13cb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.365843] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1227.365843] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f77333-f2c9-d0ad-0b48-58cac4833f1a" [ 1227.365843] env[68244]: _type = "Task" [ 1227.365843] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.374784] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f77333-f2c9-d0ad-0b48-58cac4833f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.485721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.485721] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.485721] env[68244]: DEBUG nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1227.787549] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 2aacd21f-d664-4267-8331-d3862f43d35b] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1227.876587] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f77333-f2c9-d0ad-0b48-58cac4833f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.010118} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.876877] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.877142] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1227.877388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.877536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.877714] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.877972] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd289ba4-f219-4449-844c-7848444dbec5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.885927] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.886117] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1227.886805] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8377c36-4bfd-4e0e-9270-52cab448f9b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.891706] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1227.891706] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a5aaf0-6cf4-38af-e18c-5b8e35eb81bb" [ 1227.891706] env[68244]: _type = "Task" [ 1227.891706] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.898743] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a5aaf0-6cf4-38af-e18c-5b8e35eb81bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.019825] env[68244]: WARNING nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. ignoring it [ 1228.258029] env[68244]: DEBUG nova.network.neutron [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "address": "fa:16:3e:94:d0:31", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a1055a-a5", "ovs_interfaceid": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.290428] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8f0e60c8-7029-4dd5-b615-aa2b5d115aa1] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1228.403506] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a5aaf0-6cf4-38af-e18c-5b8e35eb81bb, 'name': SearchDatastore_Task, 'duration_secs': 0.007622} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.404298] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e56b75-6312-49bc-a64f-eb181e25cbb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.409449] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1228.409449] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f64c3-21f6-787f-7846-1930fda7295b" [ 1228.409449] env[68244]: _type = "Task" [ 1228.409449] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.416942] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f64c3-21f6-787f-7846-1930fda7295b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.761306] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.761974] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.762155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.763014] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a85e931-ab45-4bf7-8b7d-6c38f529bd12 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.779193] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.779402] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.779560] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.779740] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.779884] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.780047] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.780252] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.780410] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.780573] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.780731] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.780897] env[68244]: DEBUG nova.virt.hardware [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.787087] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfiguring VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1228.787355] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76f151dc-71aa-4eb8-8838-97a46018453c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.799136] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 10957648-8618-4f2c-8b08-5468bca20cfc] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1228.805690] env[68244]: DEBUG oslo_vmware.api [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1228.805690] env[68244]: value = "task-2781272" [ 1228.805690] env[68244]: _type = "Task" [ 1228.805690] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.813055] env[68244]: DEBUG oslo_vmware.api [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781272, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.919863] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524f64c3-21f6-787f-7846-1930fda7295b, 'name': SearchDatastore_Task, 'duration_secs': 0.009449} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.921231] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.921231] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1228.921231] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-083dce3d-e600-4a50-b022-f410844fab0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.927413] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1228.927413] env[68244]: value = "task-2781273" [ 1228.927413] env[68244]: _type = "Task" [ 1228.927413] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.940015] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781273, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.973050] env[68244]: DEBUG nova.compute.manager [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-changed-81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1228.973250] env[68244]: DEBUG nova.compute.manager [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing instance network info cache due to event network-changed-81a1055a-a5f1-488f-aacc-d6c908d0af27. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1228.973467] env[68244]: DEBUG oslo_concurrency.lockutils [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.973599] env[68244]: DEBUG oslo_concurrency.lockutils [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.973744] env[68244]: DEBUG nova.network.neutron [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing network info cache for port 81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1229.302549] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f2e28e22-ea4c-44c1-a6c9-c14b56fbf47f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1229.315066] env[68244]: DEBUG oslo_vmware.api [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781272, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.437564] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.414365} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.438126] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1229.438126] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1229.438300] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db4c4369-37f8-41dc-b48e-b98e48ae31d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.445645] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1229.445645] env[68244]: value = "task-2781274" [ 1229.445645] env[68244]: _type = "Task" [ 1229.445645] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.453548] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.660025] env[68244]: DEBUG nova.network.neutron [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updated VIF entry in instance network info cache for port 81a1055a-a5f1-488f-aacc-d6c908d0af27. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1229.660466] env[68244]: DEBUG nova.network.neutron [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "address": "fa:16:3e:94:d0:31", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a1055a-a5", "ovs_interfaceid": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.806387] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: e8655168-1fe8-4590-90a3-2ad9438d7761] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1229.817133] env[68244]: DEBUG oslo_vmware.api [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781272, 'name': ReconfigVM_Task, 'duration_secs': 0.697027} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.817669] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.817880] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfigured VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1229.955252] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068524} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.955510] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1229.956268] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b584c2-7098-4d5a-a318-b830f536466d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.975517] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1229.975750] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6df88f13-bf52-455e-a283-a6e29d073e16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.994233] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1229.994233] env[68244]: value = "task-2781275" [ 1229.994233] env[68244]: _type = "Task" [ 1229.994233] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.005010] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781275, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.163514] env[68244]: DEBUG oslo_concurrency.lockutils [req-62daef70-3b88-49dc-84af-af2e7c02d677 req-80455827-abd2-447d-96cd-11be894b1212 service nova] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.322820] env[68244]: DEBUG oslo_concurrency.lockutils [None req-891ec72e-3ddb-478f-a56e-46cac4815ba1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.181s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.504703] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781275, 'name': ReconfigVM_Task, 'duration_secs': 0.271703} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.504987] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1230.505608] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7816d29c-470e-41d9-86e9-967088351f71 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.512464] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1230.512464] env[68244]: value = "task-2781276" [ 1230.512464] env[68244]: _type = "Task" [ 1230.512464] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.521356] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781276, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.021826] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781276, 'name': Rename_Task, 'duration_secs': 0.138172} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.022109] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1231.022368] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdacb6aa-582d-4564-855c-66defcc2d252 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.028433] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1231.028433] env[68244]: value = "task-2781277" [ 1231.028433] env[68244]: _type = "Task" [ 1231.028433] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.035842] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.538804] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781277, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.039902] env[68244]: DEBUG oslo_vmware.api [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781277, 'name': PowerOnVM_Task, 'duration_secs': 0.97874} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.040196] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.040393] env[68244]: INFO nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Took 5.80 seconds to spawn the instance on the hypervisor. 
[ 1232.040613] env[68244]: DEBUG nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.041662] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822567b9-e8d4-4dab-8916-c652c797096d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.106785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.107077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.107450] env[68244]: DEBUG nova.objects.instance [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.307718] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.560204] env[68244]: INFO nova.compute.manager [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Took 10.55 seconds to build instance. 
[ 1232.708986] env[68244]: DEBUG nova.objects.instance [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.813716] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.061940] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f67bfc46-69f5-4640-9ef1-e13b3085e7d8 tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.055s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.211587] env[68244]: DEBUG nova.objects.base [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<375c4371-3537-4a94-987e-0f6f72a690b8> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1233.211824] env[68244]: DEBUG nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1233.265680] env[68244]: INFO nova.compute.manager [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Rebuilding instance [ 1233.275360] env[68244]: DEBUG nova.policy [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1233.305838] env[68244]: DEBUG nova.compute.manager [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.306756] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f843bf5-5aaf-4f41-9966-58c7191d7322 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.318622] env[68244]: DEBUG 
nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Getting list of instances from cluster (obj){ [ 1233.318622] env[68244]: value = "domain-c8" [ 1233.318622] env[68244]: _type = "ClusterComputeResource" [ 1233.318622] env[68244]: } {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1233.319723] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32a1b28-4090-4510-a8ac-26aaaefc749c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.336961] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Got total of 10 instances {{(pid=68244) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1233.337582] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.337582] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 75bec02f-82f7-4e8d-81da-3c511588be29 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.337582] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.337736] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.337770] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 77ba8e47-10bb-4630-bd89-067f5ad7bad9 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.337895] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.338062] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.338203] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid a9820dc4-f52e-453c-9acf-a6a0c9a23580 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.338351] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.338495] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Triggering sync for uuid 7ee2b5ee-58e5-4d31-952c-37a8411c6244 {{(pid=68244) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1233.338833] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock 
"a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.339061] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.339327] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "75bec02f-82f7-4e8d-81da-3c511588be29" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.339598] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.339851] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.340048] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.340988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.340988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.340988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.340988] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.341172] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.341259] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.341535] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.341684] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.341904] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.342092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.342318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.342493] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.342897] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.342897] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.343049] env[68244]: INFO nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] During sync_power_state the instance has a pending task (rebuilding). Skip. [ 1233.343211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.344036] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853b4548-8163-48dd-baa6-a129d8fb0347 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.347017] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d423563-b4a6-4174-a1e2-400e03e53893 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.350430] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdb35d4-af2a-4c9a-abfe-ed02df10341c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.352918] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ce7491-d1e5-4e24-9609-0a460fef7393 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.355582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1adb546-382a-4fed-8f69-a1f9f99d6e8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.358443] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a68b78-bbc5-4d3e-8ec5-8884a38530a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.361980] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7899f45b-975b-49ce-9ee8-9d12631ab359 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.365112] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475f5a6f-c5c1-4f85-b5cd-5655f2533832 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.368431] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-49e4933c-6130-4b05-8e1a-1ed38618c2be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.797222] env[68244]: WARNING oslo_messaging._drivers.amqpdriver [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1233.899036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.899333] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.899657] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.899979] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.904431] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.562s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.907069] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.566s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.907389] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.566s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.907760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: 
held 0.565s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.912207] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.572s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.299663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.300400] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.300400] env[68244]: INFO nova.compute.manager [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Shelving [ 1234.322542] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.322728] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e7d0a91-b5ab-4514-a46d-c5617954fd11 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.330783] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1234.330783] env[68244]: value = "task-2781278" [ 1234.330783] env[68244]: _type = "Task" [ 1234.330783] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.339240] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781278, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.661770] env[68244]: DEBUG nova.compute.manager [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-plugged-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1234.661975] env[68244]: DEBUG oslo_concurrency.lockutils [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.662457] env[68244]: DEBUG oslo_concurrency.lockutils [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.662669] env[68244]: DEBUG oslo_concurrency.lockutils [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.662809] env[68244]: DEBUG nova.compute.manager [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] No waiting events found dispatching network-vif-plugged-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1234.663070] env[68244]: WARNING nova.compute.manager [req-8a6578bd-74e2-4dee-adb8-14ae69fe54ce req-c39eb1e5-61d9-4e6a-ae18-1684f031bead service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received unexpected event network-vif-plugged-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 for instance with vm_state active and task_state None. [ 1234.755980] env[68244]: DEBUG nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Successfully updated port: 60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1234.843775] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781278, 'name': PowerOffVM_Task, 'duration_secs': 0.11884} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.844130] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1234.845099] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.846353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc730c4-2bad-4595-8f38-74e01590fadf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.855588] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1234.855888] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8a65eaf-53ba-4da5-bd1d-f00875025006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.881557] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1234.881861] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1234.882140] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Deleting the datastore file [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1234.882883] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ff798f5-7708-456a-9eb0-f9e1c459a797 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.890643] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1234.890643] env[68244]: value = "task-2781280" [ 1234.890643] env[68244]: _type = "Task" [ 1234.890643] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.901267] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.261903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.262239] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.262533] env[68244]: DEBUG nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1235.309469] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.310217] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f851d21-753f-470b-b51b-03cc620c22ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.317800] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1235.317800] env[68244]: value = "task-2781281" [ 1235.317800] env[68244]: _type = "Task" [ 1235.317800] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.325859] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.401292] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098129} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.401567] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.401757] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1235.401934] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1235.799527] env[68244]: WARNING nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. ignoring it [ 1235.799747] env[68244]: WARNING nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. ignoring it [ 1235.827700] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781281, 'name': PowerOffVM_Task, 'duration_secs': 0.205662} completed successfully. 
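The unregister/delete sequence above shows the polling pattern used for every vCenter operation in this trace: a method such as FileManager.DeleteDatastoreFile_Task or VirtualMachine.PowerOffVM_Task returns a Task reference, the driver logs "Waiting for the task", polls until "progress is N%" reaches completion, and finally records the duration_secs (0.098129s and 0.205662s for the two tasks above). The snippet below is a minimal, self-contained sketch of that loop, written only to illustrate the pattern in these lines; it is not the oslo_vmware implementation, and the callable name poll_progress is invented for the example.

import time

# Illustrative sketch of the "Waiting for the task ... progress is N% ...
# completed successfully" pattern seen throughout this trace.
# NOT the oslo_vmware implementation; poll_progress is a hypothetical callable
# returning (state, percent) with state in {"running", "success", "error"}.
def wait_for_task_sketch(poll_progress, interval=0.5, timeout=300.0):
    start = time.monotonic()
    while True:
        state, percent = poll_progress()
        print("Task progress is %d%%." % percent)        # mirrors the DEBUG polls
        if state == "success":
            duration = time.monotonic() - start
            print("completed successfully (duration_secs=%.6f)" % duration)
            return duration
        if state == "error":
            raise RuntimeError("task reported an error")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete within %.0fs" % timeout)
        time.sleep(interval)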
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.827955] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.828740] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5a898b-2b00-4e48-9e03-49ed39671659 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.849820] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4dd7c7-7427-4431-8cff-043f76010db3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.170821] env[68244]: DEBUG nova.network.neutron [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "address": "fa:16:3e:94:d0:31", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a1055a-a5", "ovs_interfaceid": 
"81a1055a-a5f1-488f-aacc-d6c908d0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "address": "fa:16:3e:a9:7d:76", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60ed727d-80", "ovs_interfaceid": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.362630] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1236.363111] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5811ad24-f688-4ee7-b77d-938f26650f70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.371534] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1236.371534] env[68244]: value = "task-2781282" [ 1236.371534] env[68244]: _type = "Task" [ 1236.371534] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.379189] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781282, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.438142] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.438421] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.438595] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.438780] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.438926] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.439084] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.439294] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.439449] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.439615] 
env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.439779] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.439984] env[68244]: DEBUG nova.virt.hardware [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.440855] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8eab3f4-db81-432e-b8e2-874ca2aeae8c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.448668] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35cdd99-460c-4b2a-8236-7fd319bf5fa2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.461490] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.467083] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.467302] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1236.467495] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06e56d79-f6cd-4f9f-bb97-a96a6fe8ccf9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.483698] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.483698] env[68244]: value = "task-2781283" [ 1236.483698] env[68244]: _type = "Task" [ 1236.483698] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.492141] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781283, 'name': CreateVM_Task} progress is 0%. 
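The nova.virt.hardware lines above go from flavor/image limits of 0:0:0 (unset, so the 65536 maxima apply) to "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies" and a single sorted result VirtCPUTopology(cores=1,sockets=1,threads=1). The sketch below only illustrates why exactly one topology comes out for one vCPU: it enumerates (sockets, cores, threads) combinations whose product equals the vCPU count, which for vcpus=1 leaves only 1:1:1. It is a simplification, not nova's actual _get_possible_cpu_topologies logic.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

# Simplified enumeration: every (sockets, cores, threads) factorization of the
# vCPU count within the logged limits (65536 each when nothing is set).
def possible_topologies_sketch(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies_sketch(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]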
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.673616] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.674655] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.674975] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.676245] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705d4a74-2516-4df9-a61e-9a1dce5578a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.694818] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.695127] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.695302] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.695485] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.695631] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.695775] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.695978] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.696145] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.696311] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.696472] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.696645] env[68244]: DEBUG nova.virt.hardware [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.703161] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfiguring VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1236.704728] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c646fd78-aa8e-485f-8f5d-e78c56afe347 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.717558] env[68244]: DEBUG nova.compute.manager [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-changed-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1236.717785] env[68244]: DEBUG nova.compute.manager [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing instance network info cache due to event network-changed-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1236.717939] env[68244]: DEBUG oslo_concurrency.lockutils [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.718096] env[68244]: DEBUG oslo_concurrency.lockutils [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.718238] env[68244]: DEBUG nova.network.neutron [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Refreshing network info cache for port 60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1236.725009] env[68244]: DEBUG oslo_vmware.api [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1236.725009] env[68244]: value = "task-2781284" [ 1236.725009] env[68244]: _type = "Task" [ 1236.725009] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.733485] env[68244]: DEBUG oslo_vmware.api [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781284, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.882946] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781282, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.993697] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781283, 'name': CreateVM_Task, 'duration_secs': 0.394666} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.993879] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1236.994388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.994578] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.994932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1236.995253] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee40ce26-5a8e-4e1c-909b-fc8894c68a94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.999683] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1236.999683] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d8d0fb-d969-a651-bddd-433cb430e1fd" [ 1236.999683] env[68244]: _type = "Task" [ 1236.999683] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.007705] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d8d0fb-d969-a651-bddd-433cb430e1fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.235164] env[68244]: DEBUG oslo_vmware.api [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781284, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.381819] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781282, 'name': CreateSnapshot_Task, 'duration_secs': 0.664453} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.382186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1237.382840] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0309ad90-5757-419f-ab34-100b709c0924 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.419082] env[68244]: DEBUG nova.network.neutron [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updated VIF entry in instance network info cache for port 60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.419568] env[68244]: DEBUG nova.network.neutron [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "address": "fa:16:3e:94:d0:31", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap81a1055a-a5", "ovs_interfaceid": "81a1055a-a5f1-488f-aacc-d6c908d0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "address": "fa:16:3e:a9:7d:76", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60ed727d-80", "ovs_interfaceid": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.510235] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d8d0fb-d969-a651-bddd-433cb430e1fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009885} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.510603] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.511016] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.511097] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.511250] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.511454] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1237.511730] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7eb71c4-3bb4-48e8-994e-69cf44ff462b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.519675] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1237.519867] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1237.520588] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c735cca2-f8f4-4c1b-88aa-1b947c98e79d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.525562] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1237.525562] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5201fdda-ebd0-d07d-8d96-c277988666d4" [ 1237.525562] env[68244]: _type = "Task" [ 1237.525562] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.532971] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5201fdda-ebd0-d07d-8d96-c277988666d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.735878] env[68244]: DEBUG oslo_vmware.api [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781284, 'name': ReconfigVM_Task, 'duration_secs': 0.56134} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.736477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.736716] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfigured VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1237.902745] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1237.902949] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ebd1c40c-b838-4235-823a-f20e657ca827 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.911220] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1237.911220] env[68244]: value = "task-2781285" [ 1237.911220] env[68244]: _type = "Task" [ 1237.911220] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.918738] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781285, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.922288] env[68244]: DEBUG oslo_concurrency.lockutils [req-3cf6ceb0-98b3-48bc-a12e-cfa7eb72bef4 req-d79c44c8-be66-4b55-90c0-a3ec8c72defc service nova] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.036026] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5201fdda-ebd0-d07d-8d96-c277988666d4, 'name': SearchDatastore_Task, 'duration_secs': 0.008129} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.036864] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da6445e8-bf8c-447e-b4c8-007fd9c08995 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.042412] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1238.042412] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f3a8c0-2bbc-1951-c0ac-3aad0db67cce" [ 1238.042412] env[68244]: _type = "Task" [ 1238.042412] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.052225] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f3a8c0-2bbc-1951-c0ac-3aad0db67cce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.241742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f321c066-0b3f-4898-af3c-6fb20719faa9 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.134s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.421960] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781285, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.553251] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f3a8c0-2bbc-1951-c0ac-3aad0db67cce, 'name': SearchDatastore_Task, 'duration_secs': 0.009186} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.553516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.554333] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1238.554526] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6a91884-6c46-4c82-95d0-0db2bbf790ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.561520] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1238.561520] env[68244]: value = "task-2781286" [ 1238.561520] env[68244]: _type = "Task" [ 1238.561520] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.569229] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.923945] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781285, 'name': CloneVM_Task} progress is 95%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.072374] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781286, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.424188] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781285, 'name': CloneVM_Task, 'duration_secs': 1.3101} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.424487] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Created linked-clone VM from snapshot [ 1239.425270] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7810265-fef9-4221-a741-b5cd464a8503 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.432690] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Uploading image a77776b8-a4f1-499e-9cc0-b9dcbb5673ee {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1239.455900] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1239.455900] env[68244]: value = "vm-559173" [ 1239.455900] env[68244]: _type = "VirtualMachine" [ 1239.455900] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1239.456242] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-56564eb7-dd86-4d55-bdeb-f83fb92eeddc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.463399] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease: (returnval){ [ 1239.463399] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e1420e-f380-b0ac-83af-c6c40170db5f" [ 1239.463399] env[68244]: _type = "HttpNfcLease" [ 1239.463399] env[68244]: } obtained for exporting VM: (result){ [ 1239.463399] env[68244]: value = "vm-559173" [ 1239.463399] env[68244]: _type = "VirtualMachine" [ 1239.463399] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1239.463662] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the lease: (returnval){ [ 1239.463662] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e1420e-f380-b0ac-83af-c6c40170db5f" [ 1239.463662] env[68244]: _type = "HttpNfcLease" [ 1239.463662] env[68244]: } to be ready. 
{{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1239.470251] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1239.470251] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e1420e-f380-b0ac-83af-c6c40170db5f" [ 1239.470251] env[68244]: _type = "HttpNfcLease" [ 1239.470251] env[68244]: } is initializing. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1239.572314] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565828} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.572571] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1239.572868] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1239.573129] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d5dcd1b-f8fa-4495-bfc7-9debe8137044 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.578869] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1239.578869] env[68244]: value = "task-2781288" [ 1239.578869] env[68244]: _type = "Task" [ 1239.578869] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.586306] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.971511] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1239.971511] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e1420e-f380-b0ac-83af-c6c40170db5f" [ 1239.971511] env[68244]: _type = "HttpNfcLease" [ 1239.971511] env[68244]: } is ready. 
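In the CopyVirtualDisk/ExtendVirtualDisk sequence above, the root disk copied from the image cache for instance 7ee2b5ee-58e5-4d31-952c-37a8411c6244 is extended to 1048576 before the ReconfigVM_Task attaches it. That value matches the m1.nano flavor's root_gb=1 expressed in KiB, assuming the logged size is in KiB (1 GiB = 1024 * 1024 KiB):

# Sanity check for the "Extending root virtual disk to 1048576" entry above.
# Assumption: the logged size is in KiB; the flavor in this trace has root_gb=1.
root_gb = 1
size_kib = root_gb * 1024 * 1024
assert size_kib == 1048576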
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1239.971833] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1239.971833] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e1420e-f380-b0ac-83af-c6c40170db5f" [ 1239.971833] env[68244]: _type = "HttpNfcLease" [ 1239.971833] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1239.972463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4302e06f-d767-429f-8da8-53e9b9f9c5f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.979134] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1239.979308] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk for reading. {{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1240.034427] env[68244]: DEBUG oslo_concurrency.lockutils [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.034659] env[68244]: DEBUG oslo_concurrency.lockutils [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.036062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-81a1055a-a5f1-488f-aacc-d6c908d0af27" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.036252] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-81a1055a-a5f1-488f-aacc-d6c908d0af27" 
acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.088303] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064059} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.088557] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1240.089331] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47b71db-1e2e-4ae2-84cb-e8a31b790d45 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.108796] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.111132] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45228755-638d-455b-8229-afd6365e0dca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.126076] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-61fa6b44-b968-4ead-a906-31b76a7d7ba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.133196] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1240.133196] env[68244]: value = "task-2781289" [ 1240.133196] env[68244]: _type = "Task" [ 1240.133196] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.142349] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781289, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.538943] env[68244]: INFO nova.compute.manager [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Detaching volume 29f1c6a5-12c0-422d-9d39-cae96c725dbd [ 1240.542141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.542350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.543342] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99532150-94ad-4d3e-be83-8ba45c1d0dc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.567690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9c1b23-1dfe-45bb-9860-cd564217d1d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.597455] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfiguring VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1240.598716] env[68244]: INFO nova.virt.block_device [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Attempting to driver detach volume 29f1c6a5-12c0-422d-9d39-cae96c725dbd from mountpoint /dev/sdb [ 1240.599052] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1240.599286] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559164', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'name': 'volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dfe017bb-d860-4da6-abe5-7e8d7a7dd05a', 'attached_at': '', 'detached_at': '', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'serial': '29f1c6a5-12c0-422d-9d39-cae96c725dbd'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1240.599597] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1de4c943-c12c-4094-a70c-5bedaf0f083b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.614313] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb42707-5557-4113-8433-ebdb9a254793 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.644489] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e7ede3-e513-46ea-bec7-61ca18bbdfe2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.647693] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1240.647693] env[68244]: value = "task-2781290" [ 1240.647693] env[68244]: _type = "Task" [ 1240.647693] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.654949] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781289, 'name': ReconfigVM_Task, 'duration_secs': 0.27656} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.657295] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244/7ee2b5ee-58e5-4d31-952c-37a8411c6244.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.658167] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cd86ef8-0367-4844-ba15-5bb5077832c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.660434] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c706a9e8-a019-4804-a5e2-d56585067ffc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.667187] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.670736] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1240.670736] env[68244]: value = "task-2781291" [ 1240.670736] env[68244]: _type = "Task" [ 1240.670736] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.691571] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca08e25-0eb0-45e6-ac42-ef596e80a771 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.700218] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781291, 'name': Rename_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.711610] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] The volume has not been displaced from its original location: [datastore2] volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd/volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1240.717381] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1240.717779] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2491f606-846a-4693-a148-45a43d575c95 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.737220] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1240.737220] env[68244]: value = "task-2781292" [ 1240.737220] env[68244]: _type = "Task" [ 1240.737220] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.745502] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781292, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.158907] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.197288] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781291, 'name': Rename_Task, 'duration_secs': 0.220158} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.197649] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1241.198183] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77163af3-e026-49b1-bdc1-ea4bb953da8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.204688] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1241.204688] env[68244]: value = "task-2781293" [ 1241.204688] env[68244]: _type = "Task" [ 1241.204688] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.212658] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.248056] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781292, 'name': ReconfigVM_Task, 'duration_secs': 0.232684} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.248517] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.253675] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d730f310-7f4e-42bf-8c2c-5f1cc209a4d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.268828] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1241.268828] env[68244]: value = "task-2781294" [ 1241.268828] env[68244]: _type = "Task" [ 1241.268828] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.277369] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781294, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.660098] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.714374] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781293, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.779435] env[68244]: DEBUG oslo_vmware.api [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781294, 'name': ReconfigVM_Task, 'duration_secs': 0.158437} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.779949] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559164', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'name': 'volume-29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'dfe017bb-d860-4da6-abe5-7e8d7a7dd05a', 'attached_at': '', 'detached_at': '', 'volume_id': '29f1c6a5-12c0-422d-9d39-cae96c725dbd', 'serial': '29f1c6a5-12c0-422d-9d39-cae96c725dbd'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1242.036055] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.036317] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.159497] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.218704] env[68244]: DEBUG oslo_vmware.api [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781293, 'name': PowerOnVM_Task, 'duration_secs': 0.653552} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.219106] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1242.219421] env[68244]: DEBUG nova.compute.manager [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1242.220576] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e841d09-ba91-4a4e-98e7-4851c6675ac1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.402675] env[68244]: DEBUG nova.objects.instance [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.540026] env[68244]: DEBUG nova.compute.utils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1242.660267] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.741375] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.741728] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.741928] env[68244]: DEBUG nova.objects.instance [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1243.042599] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.161071] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.180749] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.409824] env[68244]: DEBUG oslo_concurrency.lockutils [None req-521d37a2-e7eb-4efe-ad9c-cf42e895b541 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.375s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.410969] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.230s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.411174] env[68244]: DEBUG nova.compute.manager [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.412369] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb33b07-bd46-472c-97bb-e40ae592929f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.419248] env[68244]: DEBUG nova.compute.manager [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1243.419812] env[68244]: DEBUG nova.objects.instance [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.573226] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.573531] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.573803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.574033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.574225] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.576564] env[68244]: INFO nova.compute.manager [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Terminating instance [ 1243.660565] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.750062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fdbf102d-f398-48fb-ab17-43c95b9de6ce tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.080552] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "refresh_cache-7ee2b5ee-58e5-4d31-952c-37a8411c6244" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.080716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquired lock "refresh_cache-7ee2b5ee-58e5-4d31-952c-37a8411c6244" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.080864] env[68244]: DEBUG nova.network.neutron [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1244.100635] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.100878] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.101174] env[68244]: INFO nova.compute.manager [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Attaching volume ac93ad2d-3a3f-478e-a07c-ccb0705735d0 to /dev/sdb [ 1244.133958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fcd561-451c-4646-9992-e3e9e8f596d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.141553] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1aa3c0-111d-4d12-ad8d-f94f6e52d0b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.155476] env[68244]: DEBUG nova.virt.block_device [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 
tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updating existing volume attachment record: 72e11a79-c178-4abc-a0ea-d19102b47144 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1244.165913] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.426922] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1244.427308] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52d18ce0-1402-49ea-b042-79fcf5624e9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.435195] env[68244]: DEBUG oslo_vmware.api [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1244.435195] env[68244]: value = "task-2781296" [ 1244.435195] env[68244]: _type = "Task" [ 1244.435195] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.446620] env[68244]: DEBUG oslo_vmware.api [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.636326] env[68244]: DEBUG nova.network.neutron [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1244.664135] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.696086] env[68244]: DEBUG nova.network.neutron [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.945654] env[68244]: DEBUG oslo_vmware.api [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781296, 'name': PowerOffVM_Task, 'duration_secs': 0.20115} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.945927] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1244.946142] env[68244]: DEBUG nova.compute.manager [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1244.946917] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40aa879b-4a8c-4aa3-add5-f3a01938d6ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.164188] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.198925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Releasing lock "refresh_cache-7ee2b5ee-58e5-4d31-952c-37a8411c6244" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.199387] env[68244]: DEBUG nova.compute.manager [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1245.199582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1245.200402] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9665262c-09e0-46d3-baa8-395bcd8e5c50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.207279] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.207508] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c289922c-ec6c-43c8-b848-543f79228d6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.214395] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1245.214395] env[68244]: value = "task-2781297" [ 1245.214395] env[68244]: _type = "Task" [ 1245.214395] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.221977] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.458865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0cdc266d-9c9d-4225-87e8-24124124e39c tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.666121] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.725918] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781297, 'name': PowerOffVM_Task, 'duration_secs': 0.145909} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.725918] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.725918] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.725918] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bc9636f-2d3a-4560-9a4b-c487393d3d82 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.750724] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1245.750938] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1245.751140] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Deleting the datastore file [datastore2] 7ee2b5ee-58e5-4d31-952c-37a8411c6244 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1245.751413] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9054fd57-5e55-45dc-869a-07957f6e393c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.757718] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for the task: (returnval){ [ 1245.757718] env[68244]: value = "task-2781299" [ 1245.757718] env[68244]: _type = "Task" [ 1245.757718] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.765768] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.165912] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.268605] env[68244]: DEBUG oslo_vmware.api [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Task: {'id': task-2781299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217365} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.268961] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1246.269223] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1246.269780] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1246.269860] env[68244]: INFO nova.compute.manager [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1246.270086] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.270291] env[68244]: DEBUG nova.compute.manager [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1246.270388] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1246.287688] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1246.671060] env[68244]: DEBUG oslo_vmware.api [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781290, 'name': ReconfigVM_Task, 'duration_secs': 5.77942} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.671060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.671060] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfigured VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1246.729994] env[68244]: DEBUG nova.objects.instance [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.790413] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.933047] env[68244]: DEBUG nova.compute.manager [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-deleted-81a1055a-a5f1-488f-aacc-d6c908d0af27 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1246.933291] env[68244]: INFO nova.compute.manager [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Neutron deleted interface 81a1055a-a5f1-488f-aacc-d6c908d0af27; detaching it from the instance and deleting it from the info cache [ 1246.933590] env[68244]: DEBUG nova.network.neutron [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "address": "fa:16:3e:a9:7d:76", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60ed727d-80", "ovs_interfaceid": "60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.236045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.236045] env[68244]: DEBUG oslo_concurrency.lockutils [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.236045] env[68244]: DEBUG nova.network.neutron [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1247.236045] env[68244]: DEBUG nova.objects.instance [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'info_cache' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.293342] env[68244]: INFO nova.compute.manager [-] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Took 1.02 seconds to deallocate network for instance. 
[ 1247.436833] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.437060] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] Acquired lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.437902] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e61a43-4180-42d3-b56a-8e55ee783e7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.464124] env[68244]: DEBUG oslo_concurrency.lockutils [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] Releasing lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.464419] env[68244]: WARNING nova.compute.manager [req-3f34c47b-1b81-408b-ae92-b51b04247b48 req-041c0845-497a-4012-b6d9-1fb3440c5633 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Detach interface failed, port_id=81a1055a-a5f1-488f-aacc-d6c908d0af27, reason: No device with interface-id 81a1055a-a5f1-488f-aacc-d6c908d0af27 exists on VM: nova.exception.NotFound: No device with interface-id 81a1055a-a5f1-488f-aacc-d6c908d0af27 exists on VM [ 1247.507107] env[68244]: DEBUG nova.compute.manager [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-deleted-60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1247.507315] env[68244]: INFO nova.compute.manager [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Neutron deleted interface 60ed727d-8033-4f5f-a3ad-2b8cb4fd77b2; detaching it from the instance and deleting it from the info cache [ 1247.507564] env[68244]: DEBUG nova.network.neutron [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": 
"nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.739243] env[68244]: DEBUG nova.objects.base [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1247.801289] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.801676] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.801913] env[68244]: DEBUG nova.objects.instance [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lazy-loading 'resources' on Instance uuid 7ee2b5ee-58e5-4d31-952c-37a8411c6244 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.925316] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.925547] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.925732] env[68244]: DEBUG nova.network.neutron [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.016483] env[68244]: DEBUG oslo_concurrency.lockutils [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.016677] env[68244]: DEBUG oslo_concurrency.lockutils [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 
service nova] Acquired lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.017494] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8454617-cf60-44a8-b9f8-b1850c55d638 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.035620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0199fd16-c23d-47b2-93ea-762a27b53895 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.061892] env[68244]: DEBUG nova.virt.vmwareapi.vmops [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfiguring VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1248.062206] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c461bc2-84d5-48b8-8fdf-8a6753e5504b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.075560] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.081355] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Waiting for the task: (returnval){ [ 1248.081355] env[68244]: value = "task-2781301" [ 1248.081355] env[68244]: _type = "Task" [ 1248.081355] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.089739] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.471513] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d837d97-1b18-41ed-92d7-74e8d6f0b9c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.482292] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc44c4e-0635-4a7e-b95e-ffa0601fa67e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.516097] env[68244]: DEBUG nova.network.neutron [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [{"id": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "address": "fa:16:3e:fe:62:9d", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73356d6-fa", "ovs_interfaceid": "e73356d6-fa2a-49f0-b862-b5f1644c7579", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.517875] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c055474a-1399-4f43-b32d-c922d92dada9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.529354] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0ea0fc-5fcc-4330-a7f7-566ed4dbea91 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.545851] env[68244]: DEBUG nova.compute.provider_tree [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.592625] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.704260] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1248.704260] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559174', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'name': 'volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9820dc4-f52e-453c-9acf-a6a0c9a23580', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'serial': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1248.704260] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243e0e98-fc2c-48b0-93be-fa0130631012 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.719996] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cde353-b4df-4ac1-9227-b8ec338cd5dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.744470] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0/volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1248.747119] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d07f937-c959-47da-9186-984267f1e2c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.768044] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1248.768044] env[68244]: value = "task-2781302" [ 1248.768044] env[68244]: _type = "Task" [ 1248.768044] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.774475] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781302, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.821259] env[68244]: DEBUG nova.network.neutron [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [{"id": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "address": "fa:16:3e:25:8e:de", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35117f70-8f", "ovs_interfaceid": "35117f70-8f49-457b-b347-f4aff8b3b1b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.023801] env[68244]: DEBUG oslo_concurrency.lockutils [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "refresh_cache-dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.049075] env[68244]: DEBUG nova.scheduler.client.report [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1249.092539] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.277367] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1249.278342] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ac5587-e625-45ec-a36e-2567e2f5a26c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.284853] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1249.285033] env[68244]: ERROR oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk due to incomplete transfer. [ 1249.287957] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-de249612-1a27-4220-aec0-603b41374fa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.289585] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781302, 'name': ReconfigVM_Task, 'duration_secs': 0.427086} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.289882] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0/volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1249.294995] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d893da8-5030-45aa-940e-e137d4e9c23b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.305472] env[68244]: DEBUG oslo_vmware.rw_handles [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c4e7fa-b293-5f2f-e722-61b6056bff87/disk-0.vmdk. 
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1249.305695] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Uploaded image a77776b8-a4f1-499e-9cc0-b9dcbb5673ee to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1249.307953] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1249.308505] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-daba9ae4-942f-43d3-ace4-a19fd1bc57d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.313180] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1249.313180] env[68244]: value = "task-2781303" [ 1249.313180] env[68244]: _type = "Task" [ 1249.313180] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.316819] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1249.316819] env[68244]: value = "task-2781304" [ 1249.316819] env[68244]: _type = "Task" [ 1249.316819] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.323070] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781303, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.325213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.330652] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781304, 'name': Destroy_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.541899] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.541899] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.554722] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.753s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.575546] env[68244]: INFO nova.scheduler.client.report [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Deleted allocations for instance 7ee2b5ee-58e5-4d31-952c-37a8411c6244 [ 1249.592268] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.825953] env[68244]: DEBUG oslo_vmware.api [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781303, 'name': ReconfigVM_Task, 'duration_secs': 0.151373} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.829034] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559174', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'name': 'volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9820dc4-f52e-453c-9acf-a6a0c9a23580', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'serial': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1249.831676] env[68244]: DEBUG oslo_concurrency.lockutils [None req-00445488-528b-4266-8a53-e7665b19f0f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-375c4371-3537-4a94-987e-0f6f72a690b8-81a1055a-a5f1-488f-aacc-d6c908d0af27" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.795s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.832713] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781304, 'name': Destroy_Task, 'duration_secs': 0.395265} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.833216] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Destroyed the VM [ 1249.833497] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1249.833776] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-88496908-e2ad-4e03-ba1e-44a1f45926bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.840189] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1249.840189] env[68244]: value = "task-2781305" [ 1249.840189] env[68244]: _type = "Task" [ 1249.840189] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.850630] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781305, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.935529] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.935936] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.936215] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.936442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.936662] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.939035] env[68244]: INFO nova.compute.manager [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Terminating instance [ 1250.030844] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1250.031052] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b8ff105-3e72-4568-988e-0030f0d384b8 
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.038378] env[68244]: DEBUG oslo_vmware.api [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1250.038378] env[68244]: value = "task-2781306" [ 1250.038378] env[68244]: _type = "Task" [ 1250.038378] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.044353] env[68244]: DEBUG nova.compute.utils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1250.048926] env[68244]: DEBUG oslo_vmware.api [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.089347] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3feb80e6-dc32-487c-954a-126e8758276d tempest-ServersListShow2100Test-1936568138 tempest-ServersListShow2100Test-1936568138-project-member] Lock "7ee2b5ee-58e5-4d31-952c-37a8411c6244" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.515s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.095856] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.355971] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781305, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357525} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.355971] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1250.357647] env[68244]: DEBUG nova.compute.manager [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.357647] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197966e1-5ca2-445e-8384-29c1f17bbe5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.443146] env[68244]: DEBUG nova.compute.manager [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1250.443515] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1250.444848] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7a8289-fe8c-41e3-a8ee-e8b4ed657de3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.455610] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1250.455977] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15321509-15a4-43b8-a317-28ee1c3842f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.463940] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1250.463940] env[68244]: value = "task-2781307" [ 1250.463940] env[68244]: _type = "Task" [ 1250.463940] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.475145] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781307, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.549183] env[68244]: DEBUG oslo_vmware.api [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781306, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.549918] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.593586] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.873942] env[68244]: INFO nova.compute.manager [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Shelve offloading [ 1250.875848] env[68244]: DEBUG nova.objects.instance [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid a9820dc4-f52e-453c-9acf-a6a0c9a23580 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.973903] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.049113] env[68244]: DEBUG oslo_vmware.api [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781306, 'name': PowerOnVM_Task, 'duration_secs': 0.525152} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.049383] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.049613] env[68244]: DEBUG nova.compute.manager [None req-69e2c5ba-68f4-4e42-94b8-49535ba23352 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.050441] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e378f4-ed69-4ff1-882e-088f8ac46a1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.094563] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.381077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-75bd9a7d-1fde-495e-b4e0-b0051ca72123 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.280s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.382205] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1251.382757] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3a55bbb-d43e-4864-95d3-aeffb1bafbd2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.388713] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1251.388713] env[68244]: value = "task-2781308" [ 1251.388713] env[68244]: _type = "Task" [ 1251.388713] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.397011] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781308, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.478352] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781307, 'name': PowerOffVM_Task, 'duration_secs': 0.614753} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.478352] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1251.478352] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1251.478352] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0058068-f328-42f9-b96b-b80964d35788 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.541220] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1251.541457] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1251.541750] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleting the datastore file [datastore2] 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1251.542090] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df79088b-f9c4-4c29-af6d-6eff6ea15236 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.550473] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1251.550473] env[68244]: value = "task-2781310" [ 1251.550473] env[68244]: _type = "Task" [ 1251.550473] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.564841] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.597865] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.607903] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.608294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.608677] env[68244]: INFO nova.compute.manager [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Attaching volume d602d1e3-8fef-4f67-a2e3-751cb584e75f to /dev/sdb [ 1251.645423] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6ff428-7fe0-4ad3-b06b-b70784596bfd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.651935] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b4c5b5-26c7-4df7-b484-ebf11e6d39f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.665142] env[68244]: DEBUG nova.virt.block_device [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating existing volume attachment record: 4b39ddc2-50a4-476d-bf23-ef6b1b9302c7 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1251.694917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.695202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 
tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.897937] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1251.898246] env[68244]: DEBUG nova.compute.manager [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.898958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6b1342-4959-4a48-a36c-7496fdb87e4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.904800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.904968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.905162] env[68244]: DEBUG nova.network.neutron [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.059875] env[68244]: DEBUG oslo_vmware.api [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212062} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.060135] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1252.060325] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1252.060502] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1252.060673] env[68244]: INFO nova.compute.manager [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1252.060911] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1252.061113] env[68244]: DEBUG nova.compute.manager [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1252.061207] env[68244]: DEBUG nova.network.neutron [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1252.096527] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.200247] env[68244]: INFO nova.compute.manager [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Detaching volume ac93ad2d-3a3f-478e-a07c-ccb0705735d0 [ 1252.232031] env[68244]: INFO nova.virt.block_device [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Attempting to driver detach volume ac93ad2d-3a3f-478e-a07c-ccb0705735d0 from mountpoint /dev/sdb [ 1252.232031] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1252.232307] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559174', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'name': 'volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9820dc4-f52e-453c-9acf-a6a0c9a23580', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'serial': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1252.233186] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5129b25-8406-4e9a-be66-92331a30f9ac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.256234] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46afd0c7-f8e4-45cb-a1f7-f3ed6b20457c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.263026] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d03cc7-7819-4373-81bb-8960c1cc7ba5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.285154] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8889ea-8adc-4829-acab-f6a1bac1cb40 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.299397] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] The volume has not been displaced from its original location: [datastore2] volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0/volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1252.304761] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1252.305066] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94959f30-1c69-4679-b9d9-88695c254cc2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.322596] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1252.322596] env[68244]: value = "task-2781314" [ 1252.322596] env[68244]: _type = "Task" [ 1252.322596] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.330275] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781314, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.525739] env[68244]: DEBUG nova.compute.manager [req-71a0c84f-263f-40bc-bfcb-ccdc12392b58 req-525e467f-4adc-48fd-a2f0-c53f12496aa5 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Received event network-vif-deleted-0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1252.525739] env[68244]: INFO nova.compute.manager [req-71a0c84f-263f-40bc-bfcb-ccdc12392b58 req-525e467f-4adc-48fd-a2f0-c53f12496aa5 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Neutron deleted interface 0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22; detaching it from the instance and deleting it from the info cache [ 1252.525912] env[68244]: DEBUG nova.network.neutron [req-71a0c84f-263f-40bc-bfcb-ccdc12392b58 req-525e467f-4adc-48fd-a2f0-c53f12496aa5 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.596091] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.606104] env[68244]: DEBUG nova.network.neutron [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.611021] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1252.832356] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781314, 'name': ReconfigVM_Task, 'duration_secs': 0.211786} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.832673] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1252.837400] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af7ddec2-f2dd-4def-9a7b-03a7e4727f01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.851802] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1252.851802] env[68244]: value = "task-2781315" [ 1252.851802] env[68244]: _type = "Task" [ 1252.851802] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.859422] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781315, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.006755] env[68244]: DEBUG nova.network.neutron [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.028872] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b54e3cfc-0648-4059-902c-cb3bc3cdb633 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.038082] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5bfca7-9c3a-46bf-a9f6-358c7ae652de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.070808] env[68244]: DEBUG nova.compute.manager [req-71a0c84f-263f-40bc-bfcb-ccdc12392b58 req-525e467f-4adc-48fd-a2f0-c53f12496aa5 service nova] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Detach interface failed, port_id=0f6b8a1d-f6d2-4ae9-8590-b4ab2a298f22, reason: Instance 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1253.096330] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.109035] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.349133] env[68244]: DEBUG nova.compute.manager [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-vif-unplugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1253.349369] env[68244]: DEBUG oslo_concurrency.lockutils [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.349638] env[68244]: DEBUG oslo_concurrency.lockutils [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.349894] env[68244]: DEBUG oslo_concurrency.lockutils [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.350084] env[68244]: DEBUG nova.compute.manager [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] No waiting events found dispatching network-vif-unplugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1253.350262] env[68244]: WARNING nova.compute.manager [req-2f65fd7b-effd-4352-a489-fc145eeaff4f req-a81e272d-2b5c-4e2e-acc0-ffc56c92dcc3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received unexpected event network-vif-unplugged-099ae899-d602-45fd-bdcf-deda125a5d3e for instance with vm_state shelved and task_state shelving_offloading. [ 1253.361620] env[68244]: DEBUG oslo_vmware.api [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781315, 'name': ReconfigVM_Task, 'duration_secs': 0.130743} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.361916] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559174', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'name': 'volume-ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a9820dc4-f52e-453c-9acf-a6a0c9a23580', 'attached_at': '', 'detached_at': '', 'volume_id': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0', 'serial': 'ac93ad2d-3a3f-478e-a07c-ccb0705735d0'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1253.433834] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1253.434784] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872f81e1-44b2-4d48-a337-bf35a2791ed0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.442552] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1253.442784] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a1e83b2-4c57-441f-ae0b-5b2b7cd58653 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.505117] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.505309] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.505492] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.505762] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f829c9e2-664f-4acd-b689-f3a8e1de88c8 
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.509210] env[68244]: INFO nova.compute.manager [-] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Took 1.45 seconds to deallocate network for instance. [ 1253.515278] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1253.515278] env[68244]: value = "task-2781317" [ 1253.515278] env[68244]: _type = "Task" [ 1253.515278] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.522902] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.596708] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.908075] env[68244]: DEBUG nova.objects.instance [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid a9820dc4-f52e-453c-9acf-a6a0c9a23580 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1254.016794] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.017196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.017317] env[68244]: DEBUG nova.objects.instance [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'resources' on Instance uuid 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1254.027877] env[68244]: DEBUG oslo_vmware.api [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.473241} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.028707] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.028896] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1254.029088] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1254.050827] env[68244]: INFO nova.scheduler.client.report [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted allocations for instance f1143201-5ee1-45be-b2b1-4314a26aa10a [ 1254.097716] env[68244]: DEBUG oslo_vmware.api [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Task: {'id': task-2781301, 'name': ReconfigVM_Task, 'duration_secs': 5.733132} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.097955] env[68244]: DEBUG oslo_concurrency.lockutils [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] Releasing lock "375c4371-3537-4a94-987e-0f6f72a690b8" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.098186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [req-af14f53c-8c71-431a-b229-6197a16d77dc req-22d290a0-0ba9-4ae0-a0c4-cf235e3f0fe7 service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Reconfigured VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1254.098665] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.023s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.098882] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.099089] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 
tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.099255] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.101252] env[68244]: INFO nova.compute.manager [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Terminating instance [ 1254.558620] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.606717] env[68244]: DEBUG nova.compute.manager [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1254.607042] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1254.608107] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c28eb38-f652-48ca-b1c1-5833973f04f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.615459] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.617632] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6abcde0c-6774-4cbe-a5aa-bfd07d30e294 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.624699] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1254.624699] env[68244]: value = "task-2781319" [ 1254.624699] env[68244]: _type = "Task" [ 1254.624699] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.629171] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ab60e5-6716-4717-9fda-4831aabb92a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.636340] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781319, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.639225] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4228a02d-61c3-4e51-adcb-2e2b9acfb568 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.671851] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d3c2fa-fa24-419b-aaa0-610ad7c63db8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.679390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74464ee9-148d-419d-a5af-3e404d93d1f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.692522] env[68244]: DEBUG nova.compute.provider_tree [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.917187] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9c4cfa4d-a32c-4f6a-a9e7-ee439ddddb03 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.222s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.077385] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.077668] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.134968] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781319, 'name': PowerOffVM_Task, 'duration_secs': 0.279806} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.135325] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1255.135419] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1255.135666] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2f0bad8-63a1-44e5-8187-bbebd8c35775 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.196080] env[68244]: DEBUG nova.scheduler.client.report [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.200618] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1255.200828] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1255.201018] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleting the datastore file [datastore2] 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1255.201473] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21c260f9-eea1-4620-b1d6-7a3cd5b8879f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.207708] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1255.207708] env[68244]: value = "task-2781321" [ 1255.207708] 
env[68244]: _type = "Task" [ 1255.207708] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.217958] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.405630] env[68244]: DEBUG nova.compute.manager [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1255.405833] env[68244]: DEBUG nova.compute.manager [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing instance network info cache due to event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1255.406065] env[68244]: DEBUG oslo_concurrency.lockutils [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.406210] env[68244]: DEBUG oslo_concurrency.lockutils [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.406368] env[68244]: DEBUG nova.network.neutron [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1255.580771] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.703074] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.686s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.706088] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.147s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.706088] env[68244]: DEBUG 
nova.objects.instance [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'resources' on Instance uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.717087] env[68244]: DEBUG oslo_vmware.api [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160583} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.717335] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1255.717496] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1255.717674] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1255.717890] env[68244]: INFO nova.compute.manager [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1255.718144] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.718352] env[68244]: DEBUG nova.compute.manager [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1255.718460] env[68244]: DEBUG nova.network.neutron [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1255.721335] env[68244]: INFO nova.scheduler.client.report [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted allocations for instance 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47 [ 1255.955708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.956049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.956277] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.956463] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.956633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.958777] env[68244]: INFO nova.compute.manager [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Terminating instance [ 1256.122866] env[68244]: DEBUG nova.network.neutron [req-c612ce0c-396e-4918-a88d-7b7285ecb195 
req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updated VIF entry in instance network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.123162] env[68244]: DEBUG nova.network.neutron [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap099ae899-d6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.208904] env[68244]: DEBUG nova.objects.instance [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'numa_topology' on Instance uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.210801] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1256.210993] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1256.212380] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceaebf3-c65f-4f90-858c-e4f9bc10041a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.236306] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be25b85-c8b0-4352-bd78-ef4c58d27198 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.239168] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc7483f7-29f6-4bb9-8d18-1f9405b6696a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.303s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.268736] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.269191] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc5c99de-8886-407b-8b96-235e76f04943 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.290473] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1256.290473] env[68244]: value = "task-2781322" [ 1256.290473] env[68244]: _type = "Task" [ 1256.290473] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.299714] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781322, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.408736] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.463108] env[68244]: DEBUG nova.compute.manager [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1256.463344] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1256.464317] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e80f573-b3a2-4fbe-8743-80cd5d296642 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.473986] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1256.473986] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-191e29bc-d150-49cd-b173-cc33a6fb47d5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.480023] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1256.480023] env[68244]: value = "task-2781323" [ 1256.480023] env[68244]: _type = "Task" [ 1256.480023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.487850] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.626639] env[68244]: DEBUG oslo_concurrency.lockutils [req-c612ce0c-396e-4918-a88d-7b7285ecb195 req-32d5f206-5bf7-4e69-b6fc-35603f7b0ae5 service nova] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.688230] env[68244]: DEBUG nova.network.neutron [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.716498] env[68244]: DEBUG nova.objects.base [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1256.802843] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781322, 'name': ReconfigVM_Task, 'duration_secs': 0.351365} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.804893] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.809734] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6c88ea3-11a3-47f7-90a4-dae0d06bb164 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.828282] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1256.828282] env[68244]: value = "task-2781324" [ 1256.828282] env[68244]: _type = "Task" [ 1256.828282] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.834079] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064880b0-ae2b-4011-9109-19f0d7d341d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.840105] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781324, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.844495] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eeb6463-a4ef-4fdf-b808-c74817411ac3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.874652] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddecef1c-0b15-46c8-9f63-73dde3f19c05 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.881621] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1424743e-b5e9-4c0c-880e-b936ee509a24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.896957] env[68244]: DEBUG nova.compute.provider_tree [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.987522] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781323, 'name': PowerOffVM_Task, 'duration_secs': 0.199637} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.987784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1256.987950] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1256.988224] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d2f9026-f752-4f55-a2eb-6afd00de6d20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.051186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1257.051413] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1257.051654] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleting the datastore file [datastore2] a9820dc4-f52e-453c-9acf-a6a0c9a23580 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1257.051922] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e80109f7-39b8-4761-b64a-bcf2807404e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.058216] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1257.058216] env[68244]: value = "task-2781326" [ 1257.058216] env[68244]: _type = "Task" [ 1257.058216] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.065927] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.191383] env[68244]: INFO nova.compute.manager [-] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Took 1.47 seconds to deallocate network for instance. [ 1257.338660] env[68244]: DEBUG oslo_vmware.api [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781324, 'name': ReconfigVM_Task, 'duration_secs': 0.136973} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.338758] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1257.400536] env[68244]: DEBUG nova.scheduler.client.report [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1257.430829] env[68244]: DEBUG nova.compute.manager [req-e366190a-12e7-44af-b70d-2ff4986da698 req-6da03951-d525-4836-bda9-a707e0a1b7cb service nova] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Received event network-vif-deleted-35117f70-8f49-457b-b347-f4aff8b3b1b3 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1257.568417] env[68244]: DEBUG oslo_vmware.api [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434968} completed successfully. 
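Annotation: the inventory payload logged just above (VCPU, MEMORY_MB, DISK_GB with total/reserved/allocation_ratio) is what the report client compares against Placement. As a small worked example of how such a record is commonly turned into schedulable capacity, assuming the usual rule capacity = (total - reserved) * allocation_ratio; the helper below is illustrative, not Nova code.

    # Values copied from the inventory data in the log entry above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def schedulable_capacity(inv):
        """Assumed rule: capacity = (total - reserved) * allocation_ratio."""
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}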
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.568584] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1257.568793] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1257.568969] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1257.569156] env[68244]: INFO nova.compute.manager [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1257.569394] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
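Annotation: the teardown of instance a9820dc4 in the entries above runs through a fixed order: power off, unregister the VM, delete its datastore contents, then deallocate networking, with the network step wrapped in a retrying looping call. The sketch below compresses that sequence; all the callables are hypothetical stand-ins for the real driver and Neutron calls, and the retry handling is simplified.

    def destroy_instance(instance, power_off, unregister_vm, delete_datastore_dir,
                         deallocate_network, retries=3, log=print):
        """Illustrative teardown order mirroring the log above."""
        power_off(instance)
        log(f"[instance: {instance}] Powered off the VM")
        unregister_vm(instance)
        log(f"[instance: {instance}] Unregistered the VM")
        delete_datastore_dir(instance)
        log(f"[instance: {instance}] Deleted contents of the VM from datastore")
        for attempt in range(1, retries + 1):
            try:
                deallocate_network(instance)
                log(f"[instance: {instance}] Deallocated network")
                return
            except Exception as exc:   # the looping call retries on failure
                log(f"[instance: {instance}] deallocate attempt {attempt} failed: {exc}")
        raise RuntimeError(f"could not deallocate network for {instance}")

    # Usage sketch with no-op callables standing in for the real operations.
    noop = lambda _instance: None
    destroy_instance("a9820dc4", noop, noop, noop, noop)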
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1257.569594] env[68244]: DEBUG nova.compute.manager [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1257.569704] env[68244]: DEBUG nova.network.neutron [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1257.697838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.811927] env[68244]: DEBUG nova.compute.manager [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1257.904917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.907370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.327s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.907570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.907728] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1257.908477] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.210s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.908698] env[68244]: DEBUG nova.objects.instance [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'resources' on Instance uuid 375c4371-3537-4a94-987e-0f6f72a690b8 {{(pid=68244) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.910345] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d781ce-bb36-4246-b168-75a6470c7455 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.920704] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0c9309-dec8-4837-b00e-cd6eb87880d7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.938131] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766ffa9d-b15d-4f67-b7f7-088fd9143019 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.946329] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcaaa8c-b800-4b5a-ab45-09ab7c0eb8fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.977170] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179347MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1257.977830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.332556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.381710] env[68244]: DEBUG nova.objects.instance [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'flavor' on Instance uuid 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.422227] env[68244]: DEBUG oslo_concurrency.lockutils [None req-924022e6-0702-421e-8aac-fd20ba13e753 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.122s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.423746] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.015s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1258.423746] env[68244]: INFO nova.compute.manager [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Unshelving [ 1258.495755] env[68244]: DEBUG nova.network.neutron [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.534871] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a474c4fe-f851-4171-b026-7b4c1ab93ec4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.543143] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9006e59-a809-4614-9497-aac8bbe3b6f5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.576597] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6e42df-a3c8-4fcf-8db1-d81dee5c1d32 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.583103] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e8738a-5891-4bb4-a9dd-5d0ac324ba42 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.596624] env[68244]: DEBUG nova.compute.provider_tree [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1258.887284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9db11695-68b7-4a60-bdb1-8fbd549431ab tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.279s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.997950] env[68244]: INFO nova.compute.manager [-] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Took 1.43 seconds to deallocate network for instance. 
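Annotation: the compute_resources and per-instance lock messages throughout this stretch share one shape: "Acquiring lock X by Y", then "acquired :: waited N s", then "released :: held N s". Below is a minimal sketch of a decorator that produces that kind of accounting with a plain threading lock; it is illustrative only and not oslo.concurrency's lockutils.

    import functools
    import threading
    import time

    _locks = {}                       # name -> threading.Lock, created on first use
    _registry_guard = threading.Lock()

    def synchronized(name, log=print):
        """Serialize callers on a named lock and log waited/held durations,
        in the spirit of the 'waited 0.210s' / 'held 2.199s' entries above."""
        def decorator(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                with _registry_guard:
                    lock = _locks.setdefault(name, threading.Lock())
                target = f"{func.__module__}.{func.__qualname__}"
                log(f'Acquiring lock "{name}" by "{target}"')
                t0 = time.monotonic()
                with lock:
                    waited = time.monotonic() - t0
                    log(f'Lock "{name}" acquired by "{target}" :: waited {waited:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return func(*args, **kwargs)
                    finally:
                        held = time.monotonic() - t1
                        log(f'Lock "{name}" "released" by "{target}" :: held {held:.3f}s')
            return inner
        return decorator

    @synchronized("compute_resources")
    def update_usage():
        time.sleep(0.01)              # stand-in for resource tracker bookkeeping

    update_usage()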
[ 1259.116101] env[68244]: ERROR nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [req-c76e78ae-d272-41fe-9fc2-530f47ea2951] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c76e78ae-d272-41fe-9fc2-530f47ea2951"}]} [ 1259.132941] env[68244]: DEBUG nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1259.147947] env[68244]: DEBUG nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1259.148225] env[68244]: DEBUG nova.compute.provider_tree [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1259.159089] env[68244]: DEBUG nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1259.175858] env[68244]: DEBUG nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Refreshing trait associations for 
resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1259.237377] env[68244]: INFO nova.compute.manager [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Rebuilding instance [ 1259.275827] env[68244]: DEBUG nova.compute.manager [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1259.276776] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b488f70c-a192-4e09-b8ad-868d21fba1cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.281053] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1404a58-a563-47cd-ab0b-bbee1df8695e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.291772] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6034dd60-c7fe-4476-8b55-65faa4258f53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.324946] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7312739-5f2c-4c57-b503-0cced1b87f86 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.332636] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b997eb3-0592-4a62-a3ac-81f186d66258 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.346928] env[68244]: DEBUG nova.compute.provider_tree [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1259.448572] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.457808] env[68244]: DEBUG nova.compute.manager [req-433a363a-c3b5-413c-9f13-34401e81bce7 
req-f80cc5cf-84f6-48d4-9ee4-91d11458593d service nova] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Received event network-vif-deleted-45e09a65-5a50-4359-9154-fe0bfe7f221b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1259.508153] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.877993] env[68244]: DEBUG nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 162 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1259.878202] env[68244]: DEBUG nova.compute.provider_tree [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 162 to 163 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1259.878325] env[68244]: DEBUG nova.compute.provider_tree [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1260.292060] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.292233] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-570e27bb-4074-448d-a81c-c06ac6697295 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.300147] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1260.300147] env[68244]: value = 
"task-2781327" [ 1260.300147] env[68244]: _type = "Task" [ 1260.300147] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.308036] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.383838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.476s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.388102] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.410s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.404028] env[68244]: INFO nova.scheduler.client.report [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted allocations for instance 375c4371-3537-4a94-987e-0f6f72a690b8 [ 1260.810076] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781327, 'name': PowerOffVM_Task, 'duration_secs': 0.246386} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.810359] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.861039] env[68244]: INFO nova.compute.manager [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Detaching volume d602d1e3-8fef-4f67-a2e3-751cb584e75f [ 1260.889596] env[68244]: INFO nova.virt.block_device [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Attempting to driver detach volume d602d1e3-8fef-4f67-a2e3-751cb584e75f from mountpoint /dev/sdb [ 1260.889855] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1260.890056] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1260.890911] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd49abf-a5d9-4523-ad70-9559a3876a10 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.919353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c87a471-d9ec-42c2-a5b2-bdad0b5d4476 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.922091] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1727a240-19fa-4abb-85aa-90db1474f2f2 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "375c4371-3537-4a94-987e-0f6f72a690b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.823s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.928429] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c271c4a-cf89-4a74-b9f3-2c2894baa153 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.950375] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71893b70-ea86-4cdb-aff4-20f3a73cf1f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.965660] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] The volume has not been displaced from its original location: [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1260.971036] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1260.972040] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-520ac3dc-f6c3-4336-8e1c-3429ec08ed51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.989887] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1260.989887] env[68244]: value = "task-2781328" [ 1260.989887] env[68244]: _type = "Task" [ 1260.989887] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.997389] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781328, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.399860] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating resource usage from migration 741481c1-9553-416e-9f78-9b4461def4fc [ 1261.416126] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1261.416289] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1261.416414] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1261.416557] env[68244]: WARNING nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a9820dc4-f52e-453c-9acf-a6a0c9a23580 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
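Annotation: the 409 placement.concurrent_update error a few entries above, followed by the inventory/aggregate/trait refresh and a successful update at generation 162 -> 163, is the standard optimistic-concurrency pattern: every write carries the provider generation, and a conflict means re-read and retry. Below is a hedged sketch of that loop against a toy client; get_provider, put_inventory and the retry count are assumptions for illustration, not the report client's real interface.

    def set_inventory_with_retry(client, provider_uuid, inventory, max_attempts=4, log=print):
        """Retry an inventory PUT when Placement reports a generation conflict."""
        for attempt in range(1, max_attempts + 1):
            generation = client.get_provider(provider_uuid)["generation"]
            status, body = client.put_inventory(provider_uuid, generation, inventory)
            if status == 200:
                log(f"Updated inventory for provider {provider_uuid} "
                    f"with generation {generation}")
                return body
            if status == 409:
                # Someone else bumped the generation; refresh and try again.
                log(f"Generation conflict for {provider_uuid} on attempt {attempt}, "
                    "refreshing provider state")
                continue
            raise RuntimeError(f"unexpected Placement response: {status} {body}")
        raise RuntimeError(f"gave up updating inventory for {provider_uuid}")

    class _FakeClient:
        """Toy client: the first PUT conflicts, the second succeeds at the new generation."""
        def __init__(self):
            self.generation = 162
            self.calls = 0
        def get_provider(self, uuid):
            return {"generation": self.generation}
        def put_inventory(self, uuid, generation, inventory):
            self.calls += 1
            if self.calls == 1:
                self.generation += 1          # a concurrent writer bumped it
                return 409, {"code": "placement.concurrent_update"}
            return 200, {"generation": self.generation}

    set_inventory_with_retry(_FakeClient(), "b885cb16-3bd4-46d8-abd9-28a1bf1058e3", {})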
[ 1261.416678] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 0597e8ed-2f24-44c7-ac92-06af34d6a4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1261.499578] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781328, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.919766] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f1143201-5ee1-45be-b2b1-4314a26aa10a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1261.919901] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration 741481c1-9553-416e-9f78-9b4461def4fc is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1261.921146] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1261.921146] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1261.921146] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1262.001802] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781328, 'name': ReconfigVM_Task} progress is 99%. 
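Annotation: the resource-tracker audit above walks every allocation against this compute node and sorts it into buckets: instances actively managed here, an instance scheduled here but not yet started (heal skipped), a destroyed instance that still has allocations (warning only), and an in-flight migration. The classifier below is a simplified sketch of that decision; the InstanceRecord fields and category strings are assumptions for illustration, not Nova's data model.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class InstanceRecord:
        uuid: str
        host: Optional[str]       # compute host the instance is assigned to, if any
        deleted: bool
        started: bool

    def classify_allocation(instance, this_host):
        """Decide what to do with a Placement allocation held against this_host."""
        if instance.deleted:
            return "warn: deleted instance still has allocations, skipping heal"
        if instance.host == this_host and not instance.started:
            return "skip heal: scheduled to this host but not started yet"
        if instance.host == this_host:
            return "keep: actively managed on this compute host"
        return "warn: allocation references this host but instance lives elsewhere"

    audit = [
        InstanceRecord("0597e8ed", "cpu-1", deleted=False, started=True),
        InstanceRecord("f1143201", "cpu-1", deleted=False, started=False),
        InstanceRecord("a9820dc4", None,    deleted=True,  started=False),
    ]
    for rec in audit:
        print(rec.uuid, "->", classify_allocation(rec, "cpu-1"))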
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.038317] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f881fe3b-12f1-43de-ac6f-01463a572532 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.045718] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e1d8c3-fd16-42c9-88bf-e0d460f15fac {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.077112] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28edd23f-02ee-4d6c-ae00-11d78df2bd31 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.084315] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876c84b3-95a3-41ca-be12-97a9a1439a60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.097496] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.501824] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781328, 'name': ReconfigVM_Task, 'duration_secs': 1.205054} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.502161] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1262.507430] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcc8c775-c482-4426-ae5f-3626476ab6bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.522673] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1262.522673] env[68244]: value = "task-2781329" [ 1262.522673] env[68244]: _type = "Task" [ 1262.522673] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.530533] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781329, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.600705] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1263.033527] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781329, 'name': ReconfigVM_Task, 'duration_secs': 0.133046} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.033859] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1263.105746] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1263.105968] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.719s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.106259] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.774s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.334426] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.334647] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.612038] env[68244]: INFO nova.compute.claims [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1263.836798] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1264.084198] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.084276] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ed9a831-cc4d-4bed-bf4d-82690e0c0bcc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.091980] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1264.091980] env[68244]: value = "task-2781330" [ 1264.091980] env[68244]: _type = "Task" [ 1264.091980] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.099837] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781330, 'name': PowerOffVM_Task} progress is 0%. 
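Annotation: the resize_claim / instance_claim entries around here ("Claim successful on node ...") boil down to testing a requested resource footprint against the free resources in the hypervisor view, while holding the compute_resources lock. The toy version below shows only that free-versus-requested test; the field names and the simple deduction rule are assumptions, and the real claim logic additionally covers allocation ratios, PCI and NUMA. The free values are taken from the hypervisor resource view logged earlier (free_vcpus=48, free_ram=179347MB, free_disk=176GB).

    def claim(requested, free):
        """Return True and deduct the request if every resource fits, else False.
        Both arguments are plain dicts such as {"VCPU": 1, "MEMORY_MB": 256, "DISK_GB": 1}."""
        if any(requested.get(rc, 0) > free.get(rc, 0) for rc in requested):
            return False
        for rc, amount in requested.items():
            free[rc] = free.get(rc, 0) - amount
        return True

    free = {"VCPU": 48, "MEMORY_MB": 179347, "DISK_GB": 176}
    print(claim({"VCPU": 1, "MEMORY_MB": 256, "DISK_GB": 1}, free))   # True -> claim successful
    print(free)                                                        # remaining free resources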
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.118037] env[68244]: INFO nova.compute.resource_tracker [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating resource usage from migration 741481c1-9553-416e-9f78-9b4461def4fc [ 1264.227047] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075e9269-8326-4e42-b396-dacb9d62e9cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.234541] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47ae80e-fe44-4a7e-9019-7bf1d16046b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.263910] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489b7576-9a42-4e8a-9094-adc53506842e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.271509] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ffeaf7-13c9-45ed-aaca-8134ae9dce70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.284246] env[68244]: DEBUG nova.compute.provider_tree [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1264.354435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.602601] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1264.602992] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1264.603240] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1264.604021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dace241e-4828-464e-affd-3894b7d717a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.623046] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4bf518-a1c4-428c-af26-5856b56b0bf5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.628805] env[68244]: WARNING nova.virt.vmwareapi.driver [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1264.629082] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1264.629792] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705474c2-b907-4605-85cb-2e74ffa72e17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.635558] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1264.635762] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83bef9d1-0739-4ad2-b243-e1081bd9e6c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.698203] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1264.698402] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None 
req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1264.698530] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1264.698802] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fa6b65b-22ef-4334-9c11-42253b678b80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.705079] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1264.705079] env[68244]: value = "task-2781332" [ 1264.705079] env[68244]: _type = "Task" [ 1264.705079] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.712330] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.787589] env[68244]: DEBUG nova.scheduler.client.report [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1265.103677] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.103853] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.104132] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.104366] env[68244]: DEBUG oslo_service.periodic_task [None 
req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.104553] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.104720] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1265.215418] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144068} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.215744] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1265.215950] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1265.216143] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1265.292806] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.186s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.293051] env[68244]: INFO nova.compute.manager [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Migrating [ 1265.299318] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.851s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.299426] env[68244]: DEBUG nova.objects.instance [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 
tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'pci_requests' on Instance uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.721419] env[68244]: INFO nova.virt.block_device [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Booting with volume d602d1e3-8fef-4f67-a2e3-751cb584e75f at /dev/sdb [ 1265.755198] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b375a93-7e53-4666-94bf-3d79f8cc213e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.764856] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135da363-2dea-4dda-929e-f0cbdd6df975 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.791866] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c6b3d09-101d-4ee9-9cd1-8e2bed2d1d97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.799196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fa29bd-5b7e-40a5-8804-304c5daf5c80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.810181] env[68244]: DEBUG nova.objects.instance [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'numa_topology' on Instance uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.812600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.812757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.812922] env[68244]: DEBUG nova.network.neutron [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1265.831647] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d718c5-6162-4fa0-a72f-940c573bc167 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.837638] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c8c168-5028-407c-8c64-53f1a7acdd05 {{(pid=68244) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.850414] env[68244]: DEBUG nova.virt.block_device [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating existing volume attachment record: 6974866d-6400-4d9e-b96f-28839c16439e {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1266.317294] env[68244]: INFO nova.compute.claims [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.518533] env[68244]: DEBUG nova.network.neutron [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.021186] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.435459] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8bcc8b-6c41-48bb-8e75-2e82f1d3b1ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.443206] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e63ff0-5539-4e68-a32b-c038aacb2b67 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.474954] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0d857dc6-79d0-4346-805f-6c25220c6892 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.482392] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316e0527-02e2-4e1f-8dd9-c00d24cf7dbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.495917] env[68244]: DEBUG nova.compute.provider_tree [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.961022] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1267.961022] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.961022] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1267.961260] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.961382] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1267.961531] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1267.961765] env[68244]: DEBUG 
nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1267.961946] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1267.962129] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1267.962293] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1267.962464] env[68244]: DEBUG nova.virt.hardware [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1267.963363] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544ff21f-3f24-434e-9b1d-a828ca1241ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.971080] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5c152a-c78c-439a-bce8-43d31bb4fc8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.983880] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:96:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f74db30-2640-4e0b-9332-eecb85a1b8bc', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1267.991165] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.991386] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1267.991581] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b634b6b-9ce5-4629-a9a6-e22cf294d7d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.005516] env[68244]: DEBUG nova.scheduler.client.report [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.013638] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1268.013638] env[68244]: value = "task-2781333" [ 1268.013638] env[68244]: _type = "Task" [ 1268.013638] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.020682] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781333, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.510605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.211s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.512957] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.005s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.513171] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.514915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.161s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.516443] env[68244]: INFO nova.compute.claims [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1268.527974] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781333, 'name': CreateVM_Task, 'duration_secs': 0.27794} completed successfully. 
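The task-2781332/2781333 entries above follow the wait_for_task / _poll_task pattern that recurs throughout this log: the driver submits a vCenter task (DeleteDatastoreFile_Task, CreateVM_Task) and then polls its progress until it reaches a terminal state. The snippet below is only a rough illustration of that polling loop; poll_task_state and the canned _FAKE_TASKS table are made up for the example and are not the oslo.vmware implementation, which adds retries, looping-call scheduling and richer error handling.

import itertools
import time

# Fake task table standing in for vCenter task state; purely illustrative.
_FAKE_TASKS = {"task-2781333": itertools.chain(
    [("running", 0), ("running", 50)], itertools.repeat(("success", 100)))}

def poll_task_state(task_ref):
    """Return (state, progress) for a task; here backed by canned data."""
    return next(_FAKE_TASKS[task_ref])

def wait_for_task(task_ref, interval=0.1, timeout=30):
    """Poll a task until it reaches a terminal state or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_task_state(task_ref)
        print(f"Task {task_ref}: {state} ({progress}%)")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"{task_ref} failed")
        time.sleep(interval)
    raise TimeoutError(f"{task_ref} did not finish within {timeout}s")

wait_for_task("task-2781333")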
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.528170] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1268.534363] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7eb7c0-53ff-4e2f-8f86-e49ad209451b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.537789] env[68244]: INFO nova.scheduler.client.report [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted allocations for instance a9820dc4-f52e-453c-9acf-a6a0c9a23580 [ 1268.539767] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.539947] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.541440] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1268.542022] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf0483c2-ac5d-4701-b40a-99565a105afc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.547514] env[68244]: INFO nova.network.neutron [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating port 099ae899-d602-45fd-bdcf-deda125a5d3e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1268.566461] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1268.566461] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526d9a48-0ee6-30dd-f0a9-4362cd7c6c18" [ 1268.566461] env[68244]: _type = "Task" [ 1268.566461] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.567619] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1268.580303] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526d9a48-0ee6-30dd-f0a9-4362cd7c6c18, 'name': SearchDatastore_Task, 'duration_secs': 0.011316} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.580570] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.580820] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1268.581067] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.581216] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.581385] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.582099] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1ad48a4-866e-4e56-8732-44ac41335eb3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.589827] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.590072] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1268.590955] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aa23e5a-e3b1-4653-80e8-5085f4b2b790 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.596537] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1268.596537] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520bb384-a259-12bc-c1c4-109eb60b2751" [ 1268.596537] env[68244]: _type = "Task" [ 1268.596537] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.603859] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520bb384-a259-12bc-c1c4-109eb60b2751, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.049313] env[68244]: DEBUG oslo_concurrency.lockutils [None req-508f1096-0bb2-4469-9be0-e2bc40d6db43 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "a9820dc4-f52e-453c-9acf-a6a0c9a23580" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.093s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.073164] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1269.073771] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c10863fe-280d-4181-9e12-fbe50228d85d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.081131] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1269.081131] env[68244]: value = "task-2781334" [ 1269.081131] env[68244]: _type = "Task" [ 1269.081131] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.089479] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781334, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.105492] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520bb384-a259-12bc-c1c4-109eb60b2751, 'name': SearchDatastore_Task, 'duration_secs': 0.008119} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.106334] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c0fa424-7479-44cf-ade2-36aaad2f220f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.111239] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1269.111239] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523e74a2-e9fa-888d-cce6-7bdd26ed0087" [ 1269.111239] env[68244]: _type = "Task" [ 1269.111239] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.118925] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523e74a2-e9fa-888d-cce6-7bdd26ed0087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.593281] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781334, 'name': PowerOffVM_Task, 'duration_secs': 0.2058} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.593576] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1269.593732] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1269.622338] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523e74a2-e9fa-888d-cce6-7bdd26ed0087, 'name': SearchDatastore_Task, 'duration_secs': 0.010241} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.622663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.622907] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.623189] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6db5123a-133a-4ecc-afb3-3a1b6592eae6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.630669] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1269.630669] env[68244]: value = "task-2781335" [ 1269.630669] env[68244]: _type = "Task" [ 1269.630669] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.642656] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781335, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.646304] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bfe5bf-8b90-47be-9b24-d9a9cf782d77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.652718] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5062e18a-b854-429e-b9c2-13456e5d552f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.686476] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9564829-6031-497a-8a74-5bbbb9555cd1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.695326] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047dc50e-4a3b-4ecd-8d65-03d26bf3d5a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.709104] env[68244]: DEBUG nova.compute.provider_tree [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.970197] env[68244]: DEBUG nova.compute.manager [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1269.970814] env[68244]: DEBUG oslo_concurrency.lockutils [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.971209] env[68244]: DEBUG oslo_concurrency.lockutils [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.971415] env[68244]: DEBUG oslo_concurrency.lockutils [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.971665] env[68244]: DEBUG nova.compute.manager [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] No waiting events found dispatching network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1269.971802] 
env[68244]: WARNING nova.compute.manager [req-45ad9165-145b-4fef-9a60-7dbb752d8ca6 req-699433ff-a2c1-4ef9-84b7-79808479470c service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received unexpected event network-vif-plugged-099ae899-d602-45fd-bdcf-deda125a5d3e for instance with vm_state shelved_offloaded and task_state spawning. [ 1270.057583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.057789] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.058123] env[68244]: DEBUG nova.network.neutron [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.100462] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1270.100703] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1270.100860] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1270.101056] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1270.101207] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1270.101353] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1270.101556] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1270.101764] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1270.101975] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1270.102174] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1270.102347] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1270.107958] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b04cd90-eacb-4382-9bfe-63b0bca284df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.123633] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1270.123633] env[68244]: value = "task-2781336" [ 1270.123633] env[68244]: _type = "Task" [ 1270.123633] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.131895] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781336, 'name': ReconfigVM_Task} progress is 5%. 
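The nova.virt.hardware entries a few records above show how the driver narrows CPU topologies for the 1-vCPU m1.micro flavor: with no flavor or image limits, every (sockets, cores, threads) factorization of the vCPU count up to the 65536-per-dimension cap is considered, which for one vCPU leaves only 1:1:1. The function below is a simplified re-derivation of that enumeration for illustration; it is not the code in nova/virt/hardware.py.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topos = []
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topos.append((sockets, cores, threads))
    return topos

# A 1-vCPU guest admits exactly one topology, matching the "Possible
# topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries.
print(possible_topologies(1))   # [(1, 1, 1)]
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), ...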
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.141525] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781335, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458911} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.141792] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.142014] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1270.142272] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71888603-d5c9-4454-bea6-d1387448ea72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.148587] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1270.148587] env[68244]: value = "task-2781337" [ 1270.148587] env[68244]: _type = "Task" [ 1270.148587] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.156403] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781337, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.212847] env[68244]: DEBUG nova.scheduler.client.report [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.636717] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781336, 'name': ReconfigVM_Task, 'duration_secs': 0.238913} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.639278] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1270.656987] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065793} completed successfully. 
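The repeated "Inventory has not changed for provider b885cb16-…" entries carry the resource-provider inventory the compute node reports to Placement. Read roughly (Placement's capacity check is approximately (total - reserved) * allocation_ratio, with max_unit bounding any single allocation), this provider exposes about 192 schedulable VCPUs, ~196 GB of RAM and 400 GB of disk. The helper below only restates that arithmetic for the logged inventory; the field names mirror the log, the function itself is an illustrative sketch rather than Placement code.

INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 176, "allocation_ratio": 1.0},
}

def capacity(inv):
    """Approximate schedulable capacity per resource class from inventory data."""
    return {rc: (f["total"] - f["reserved"]) * f["allocation_ratio"]
            for rc, f in inv.items()}

for rc, cap in capacity(INVENTORY).items():
    # max_unit still limits what one instance may request from this provider.
    print(f"{rc}: ~{cap:.0f} schedulable (single allocation capped at {INVENTORY[rc]['max_unit']})")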
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.657264] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1270.658146] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6676831-11be-4ba9-993a-a466397bc99f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.680542] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1270.683249] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0835c06c-c770-439d-a731-3f250cbb57f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.704230] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1270.704230] env[68244]: value = "task-2781338" [ 1270.704230] env[68244]: _type = "Task" [ 1270.704230] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.712731] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.717718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.718260] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Start building networks asynchronously for instance. 
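The Acquiring / acquired / "released" lock lines around the resource tracker (for example the "compute_resources" lock held 2.203s by instance_claim just above) come from oslo.concurrency's lockutils, which serializes claim bookkeeping on a named lock and logs the wait and hold times seen here. A minimal sketch of that usage pattern, assuming an in-process (non-external) lock and a made-up instance_claim body:

from oslo_concurrency import lockutils

# Serialize all resource-claim bookkeeping on one named lock, as the
# "compute_resources" acquire/release entries in the log suggest.
@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid, vcpus, memory_mb):
    """Hypothetical claim: would update tracked usage while holding the lock."""
    print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")

# The same named lock can also be taken explicitly as a context manager.
with lockutils.lock("compute_resources"):
    print("usage updated under the lock")

instance_claim("64467948-35bb-4ad7-ac76-bbbd6f66e96f", 1, 192)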
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1270.808495] env[68244]: DEBUG nova.network.neutron [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.910363] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.910588] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.146794] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1271.147232] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.147437] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1271.147639] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.147810] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1271.147965] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1271.148193] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1271.148357] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1271.148527] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1271.148693] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1271.149685] env[68244]: DEBUG nova.virt.hardware [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1271.155466] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfiguring VM instance instance-0000004e to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1271.155817] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baee8407-fd8e-47f1-a5cb-95e9abec1f37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.174571] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1271.174571] env[68244]: value = "task-2781339" [ 1271.174571] env[68244]: _type = "Task" [ 1271.174571] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.182589] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.214440] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781338, 'name': ReconfigVM_Task, 'duration_secs': 0.313582} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.214748] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa/0597e8ed-2f24-44c7-ac92-06af34d6a4fa.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.216032] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encrypted': False, 'size': 0, 'boot_index': 0, 'device_type': 'disk', 'guest_format': None, 'disk_bus': None, 'device_name': '/dev/sda', 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'image_id': '9aa0b4d1-af1b-4141-9ca6-95525b722d7e'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'boot_index': None, 'guest_format': None, 'attachment_id': '6974866d-6400-4d9e-b96f-28839c16439e', 'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': 
'', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'}, 'volume_type': None}], 'swap': None} {{(pid=68244) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1271.216299] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1271.216567] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1271.217494] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e44174-1c22-4f61-bc31-946cc5cbd0fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.234299] env[68244]: DEBUG nova.compute.utils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1271.235862] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1271.236044] env[68244]: DEBUG nova.network.neutron [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1271.238313] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57c063a-fb30-42ff-903e-32692771b9b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.266133] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.267026] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ac95380-d671-4ceb-a6e9-3cc166ee2262 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.282434] env[68244]: DEBUG nova.policy [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1271.289908] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1271.289908] env[68244]: value = "task-2781340" [ 1271.289908] env[68244]: _type = "Task" [ 1271.289908] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.298371] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781340, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.312938] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.348441] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='dc0c92fcb4bfb9ba5e5943b5bd78794e',container_format='bare',created_at=2025-03-06T03:30:42Z,direct_url=,disk_format='vmdk',id=a77776b8-a4f1-499e-9cc0-b9dcbb5673ee,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2116447903-shelved',owner='d41b4d274faa4f5a8951d39fa0d0c714',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-03-06T03:30:58Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1271.348737] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.348934] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1271.349175] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.349364] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1271.349558] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1271.349960] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1271.350161] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1271.350342] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1271.350507] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1271.350681] env[68244]: DEBUG nova.virt.hardware [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1271.351559] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8aaa50-b0e8-4a3e-881c-975852fc4133 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.360123] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28e3c3d-6620-4dc5-b80f-4eca2090afa9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.374118] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:bb:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '099ae899-d602-45fd-bdcf-deda125a5d3e', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1271.381710] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1271.382022] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1271.382275] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-519ca590-b12b-49f1-becf-e7497e60c09c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.403552] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1271.403552] env[68244]: value = "task-2781341" [ 1271.403552] env[68244]: _type = "Task" [ 1271.403552] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.411726] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781341, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.413268] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1271.590102] env[68244]: DEBUG nova.network.neutron [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Successfully created port: c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1271.684873] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781339, 'name': ReconfigVM_Task, 'duration_secs': 0.161754} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.685277] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfigured VM instance instance-0000004e to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1271.685965] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c87a2f9-4e9e-44b5-add2-4d7b35241bf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.708335] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.708634] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a465fb38-be6d-4333-9155-6da281c637cb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.726717] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1271.726717] env[68244]: value = "task-2781342" [ 1271.726717] env[68244]: _type = "Task" [ 1271.726717] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.734522] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.742159] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1271.802393] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781340, 'name': ReconfigVM_Task, 'duration_secs': 0.297202} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.802733] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.807793] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2d8a081-30f5-4958-bede-79101e0e4745 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.823779] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1271.823779] env[68244]: value = "task-2781343" [ 1271.823779] env[68244]: _type = "Task" [ 1271.823779] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.831858] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781343, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.917024] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781341, 'name': CreateVM_Task, 'duration_secs': 0.335919} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.917024] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1271.917024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.917024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.917024] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1271.917024] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ae20ecd-b758-4327-a1ad-e7f3f64b8d56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.925338] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1271.925338] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528e60a4-d630-23bb-7cde-3a44fad8f52b" [ 1271.925338] env[68244]: _type = "Task" [ 1271.925338] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.937283] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528e60a4-d630-23bb-7cde-3a44fad8f52b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.945018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.945018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.945018] env[68244]: INFO nova.compute.claims [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1271.999599] env[68244]: DEBUG nova.compute.manager [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1271.999985] env[68244]: DEBUG nova.compute.manager [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing instance network info cache due to event network-changed-099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1272.000307] env[68244]: DEBUG oslo_concurrency.lockutils [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] Acquiring lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.000555] env[68244]: DEBUG oslo_concurrency.lockutils [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] Acquired lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.000836] env[68244]: DEBUG nova.network.neutron [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Refreshing network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1272.237046] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781342, 'name': ReconfigVM_Task, 'duration_secs': 0.276684} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.237339] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Reconfigured VM instance instance-0000004e to attach disk [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1/a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1272.237597] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1272.333761] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781343, 'name': ReconfigVM_Task, 'duration_secs': 0.147161} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.333899] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1272.334582] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbd1a21d-04f6-45e3-ada3-1692f3cb1c99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.340315] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1272.340315] env[68244]: value = "task-2781344" [ 1272.340315] env[68244]: _type = "Task" [ 1272.340315] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.347684] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781344, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.437898] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.438149] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Processing image a77776b8-a4f1-499e-9cc0-b9dcbb5673ee {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1272.438434] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.438598] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.438792] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1272.439249] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4c7f25d-090f-4d38-a463-b4ed4184d91a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.448509] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1272.448509] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1272.452293] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09b5e179-c674-4cc4-93ca-9e11718d6840 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.458081] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1272.458081] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520f66fe-fa0b-397a-794e-35b1487fc738" [ 1272.458081] env[68244]: _type = "Task" [ 1272.458081] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.470643] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]520f66fe-fa0b-397a-794e-35b1487fc738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.736523] env[68244]: DEBUG nova.network.neutron [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updated VIF entry in instance network info cache for port 099ae899-d602-45fd-bdcf-deda125a5d3e. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.736888] env[68244]: DEBUG nova.network.neutron [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [{"id": "099ae899-d602-45fd-bdcf-deda125a5d3e", "address": "fa:16:3e:84:bb:6f", "network": {"id": "b0de602b-9c4a-4c06-acd4-b6e9c6b97589", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1207322481-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d41b4d274faa4f5a8951d39fa0d0c714", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099ae899-d6", "ovs_interfaceid": "099ae899-d602-45fd-bdcf-deda125a5d3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.743562] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dc47f0-a55a-4b80-b755-ff5368e4746e {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.764487] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1272.768038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bf1517-cc72-4810-aed7-e29dd3f4bd7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.784991] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1272.795420] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1272.795647] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1272.795802] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1272.795984] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1272.796144] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1272.796290] env[68244]: DEBUG nova.virt.hardware [None 
req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1272.796492] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1272.796649] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1272.796819] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1272.796973] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1272.797158] env[68244]: DEBUG nova.virt.hardware [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1272.797983] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0553f143-672f-43ab-9830-e2c624996646 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.805494] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a745c85-1034-48cc-afee-211a818645dd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.849014] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781344, 'name': Rename_Task, 'duration_secs': 0.143487} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.849310] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.849538] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41cef042-048c-49cb-a499-e2cc37ea1a9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.855626] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1272.855626] env[68244]: value = "task-2781345" [ 1272.855626] env[68244]: _type = "Task" [ 1272.855626] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.863275] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.969522] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Preparing fetch location {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1272.969938] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Fetch image to [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4/OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4.vmdk {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1272.970202] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Downloading stream optimized image a77776b8-a4f1-499e-9cc0-b9dcbb5673ee to [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4/OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4.vmdk on the data store datastore2 as vApp {{(pid=68244) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1272.970414] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Downloading image file data a77776b8-a4f1-499e-9cc0-b9dcbb5673ee to the ESX as VM named 'OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4' {{(pid=68244) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1273.023578] env[68244]: DEBUG nova.network.neutron [None 
req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Successfully updated port: c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1273.049859] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1273.049859] env[68244]: value = "resgroup-9" [ 1273.049859] env[68244]: _type = "ResourcePool" [ 1273.049859] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1273.050164] env[68244]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ba9a897f-5b8c-4c5d-be66-406985751368 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.074143] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease: (returnval){ [ 1273.074143] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52405b20-dbcb-db7c-8a04-2b06607aeb2f" [ 1273.074143] env[68244]: _type = "HttpNfcLease" [ 1273.074143] env[68244]: } obtained for vApp import into resource pool (val){ [ 1273.074143] env[68244]: value = "resgroup-9" [ 1273.074143] env[68244]: _type = "ResourcePool" [ 1273.074143] env[68244]: }. {{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1273.074412] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the lease: (returnval){ [ 1273.074412] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52405b20-dbcb-db7c-8a04-2b06607aeb2f" [ 1273.074412] env[68244]: _type = "HttpNfcLease" [ 1273.074412] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1273.082954] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1273.082954] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52405b20-dbcb-db7c-8a04-2b06607aeb2f" [ 1273.082954] env[68244]: _type = "HttpNfcLease" [ 1273.082954] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1273.124405] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffcc5c5-ef86-4f4e-aedf-a0e3df0d0846 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.133810] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16eefe4c-6780-40b1-a228-5e12a448fcbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.168015] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11866588-ed70-41ae-9fad-46a00c3639ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.176173] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcacc3b-e678-445a-8a4c-921448fd788f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.190332] env[68244]: DEBUG nova.compute.provider_tree [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.239966] env[68244]: DEBUG oslo_concurrency.lockutils [req-dd794c89-f912-431d-a4b9-297ecc54cb83 req-7965e9d3-ea0c-469f-987b-9b04ab28b63f service nova] Releasing lock "refresh_cache-f1143201-5ee1-45be-b2b1-4314a26aa10a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.325098] env[68244]: DEBUG nova.network.neutron [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Port a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1273.366327] env[68244]: DEBUG oslo_vmware.api [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781345, 'name': PowerOnVM_Task, 'duration_secs': 0.432515} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.366573] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.366785] env[68244]: DEBUG nova.compute.manager [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.367582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253e0c85-6aa9-4756-ba5f-94377c83bf0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.528834] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.529012] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.529183] env[68244]: DEBUG nova.network.neutron [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.582657] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1273.582657] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52405b20-dbcb-db7c-8a04-2b06607aeb2f" [ 1273.582657] env[68244]: _type = "HttpNfcLease" [ 1273.582657] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1273.582986] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1273.582986] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52405b20-dbcb-db7c-8a04-2b06607aeb2f" [ 1273.582986] env[68244]: _type = "HttpNfcLease" [ 1273.582986] env[68244]: }. 
{{(pid=68244) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1273.583706] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3b1b09-6a32-4951-ac47-a843beee61b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.590795] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1273.590966] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk. {{(pid=68244) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1273.655074] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2939e4e1-9fbb-4d40-af33-bd5021264821 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.693304] env[68244]: DEBUG nova.scheduler.client.report [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1273.885986] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.029686] env[68244]: DEBUG nova.compute.manager [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-vif-plugged-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1274.029686] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1274.029686] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.029686] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.029947] env[68244]: DEBUG nova.compute.manager [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] No waiting events found dispatching network-vif-plugged-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1274.030171] env[68244]: WARNING nova.compute.manager [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received unexpected event network-vif-plugged-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 for instance with vm_state building and task_state spawning. [ 1274.030348] env[68244]: DEBUG nova.compute.manager [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1274.030528] env[68244]: DEBUG nova.compute.manager [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1274.030753] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.067227] env[68244]: DEBUG nova.network.neutron [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1274.198685] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.199238] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1274.204023] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.318s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.204285] env[68244]: DEBUG nova.objects.instance [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1274.207772] env[68244]: DEBUG nova.network.neutron [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.363654] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock 
"a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.363878] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.364110] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.711326] env[68244]: DEBUG nova.compute.utils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1274.716114] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.716114] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Instance network_info: |[{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1274.716376] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 
tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1274.716376] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1274.719978] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.720163] env[68244]: DEBUG nova.network.neutron [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.721208] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:51:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c38bbec1-b6bf-4b43-9914-b4dc63d8e894', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.728725] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1274.732377] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1274.734305] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Completed reading data from the image iterator. {{(pid=68244) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1274.734492] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk. 
{{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1274.734712] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dea18776-6535-4826-a91b-edde704547e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.750101] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7731c56f-360c-4223-8bca-e0d840d5907e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.757099] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1274.757348] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1274.759018] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-af79302c-e0fd-4bb0-a3fd-5678f0867cca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.760602] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.760602] env[68244]: value = "task-2781347" [ 1274.760602] env[68244]: _type = "Task" [ 1274.760602] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.768858] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781347, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.778862] env[68244]: DEBUG nova.policy [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f93ab312c1f44d7877c43a7b101cb5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4349b19805a8498392649e1b825d5da7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1274.951103] env[68244]: DEBUG oslo_vmware.rw_handles [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52527967-0522-de6f-15df-90608e99b4a3/disk-0.vmdk. 
{{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1274.951487] env[68244]: INFO nova.virt.vmwareapi.images [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Downloaded image file data a77776b8-a4f1-499e-9cc0-b9dcbb5673ee [ 1274.952231] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72990f35-b45e-4780-aa88-a59910b142ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.967667] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e732ce5-5711-4b01-9ce1-c1b71739afb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.982541] env[68244]: DEBUG nova.network.neutron [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.982798] env[68244]: DEBUG nova.network.neutron [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.999466] env[68244]: INFO nova.virt.vmwareapi.images [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] The imported VM was unregistered [ 1275.002074] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Caching image {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1275.002347] 
env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Creating directory with path [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.002940] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae93fec4-6b8a-41d2-9ddb-918e3d9bb0ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.028499] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Created directory with path [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.028866] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4/OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4.vmdk to [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk. {{(pid=68244) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1275.029083] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-db5c3828-6048-4224-8820-487762cc997f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.036787] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1275.036787] env[68244]: value = "task-2781349" [ 1275.036787] env[68244]: _type = "Task" [ 1275.036787] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.045381] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.071501] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Successfully created port: 67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1275.220170] env[68244]: DEBUG oslo_concurrency.lockutils [None req-428db045-dcc6-4586-8275-1e01a1746bfc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.221518] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1275.272804] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781347, 'name': CreateVM_Task, 'duration_secs': 0.359724} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.273045] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1275.273752] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.273969] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.274311] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1275.274572] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffc50b89-42cb-44db-bee0-cfc33a918193 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.282305] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1275.282305] 
env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209622d-9346-550e-05f1-6f4a52b981a7" [ 1275.282305] env[68244]: _type = "Task" [ 1275.282305] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.292450] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209622d-9346-550e-05f1-6f4a52b981a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.402918] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.403221] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.403577] env[68244]: DEBUG nova.network.neutron [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.485674] env[68244]: DEBUG oslo_concurrency.lockutils [req-625a94a4-348f-49a0-a4b4-9ddd2b3f3b78 req-07623535-ca48-49d8-b3f0-b5c976971647 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.549708] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.795106] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5209622d-9346-550e-05f1-6f4a52b981a7, 'name': SearchDatastore_Task, 'duration_secs': 0.05094} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.795462] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.795670] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.795909] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.796070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.796260] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.796544] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-132ef3e2-6f55-496a-b7c4-a30f49bb3748 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.816452] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.816701] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1275.817468] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8844d170-fe5b-4356-8705-51475b7f7b6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.826324] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1275.826324] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a6e4c-e587-6474-9062-9ddaf1e4fab8" [ 1275.826324] env[68244]: _type = "Task" [ 1275.826324] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.837450] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a6e4c-e587-6474-9062-9ddaf1e4fab8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.049366] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.175482] env[68244]: DEBUG nova.network.neutron [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.231103] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 
tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1276.260424] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.260670] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.260826] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.261010] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.261166] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.261315] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.261553] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.261676] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.261920] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.262113] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.262313] env[68244]: DEBUG nova.virt.hardware [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.263343] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c477510-73a1-45a3-9ac4-f376ab6a5033 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.273984] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ceb64b4-e887-4922-966b-d1dea59080ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.338615] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a6e4c-e587-6474-9062-9ddaf1e4fab8, 'name': SearchDatastore_Task, 'duration_secs': 0.089147} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.339468] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27d09b24-1576-43d3-93b9-22dbcc6251d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.347593] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1276.347593] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524909a8-2cd5-6d99-091b-ecabc1df55c9" [ 1276.347593] env[68244]: _type = "Task" [ 1276.347593] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.358648] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524909a8-2cd5-6d99-091b-ecabc1df55c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.549960] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.678987] env[68244]: DEBUG oslo_concurrency.lockutils [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.703709] env[68244]: DEBUG nova.compute.manager [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Received event network-vif-plugged-67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1276.704087] env[68244]: DEBUG oslo_concurrency.lockutils [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.704320] env[68244]: DEBUG oslo_concurrency.lockutils [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.704506] env[68244]: DEBUG oslo_concurrency.lockutils [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.704669] env[68244]: DEBUG nova.compute.manager [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] No waiting events found dispatching network-vif-plugged-67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1276.704838] env[68244]: WARNING nova.compute.manager [req-9eb9b5d2-2b30-4fdd-b8a2-b91e177592de req-cbf03b0d-e6bb-46ed-a2ea-3dbd03f1b0d1 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Received unexpected event network-vif-plugged-67156309-cd60-4569-ab05-05548717b11b for instance with vm_state building and task_state spawning. 
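Editor's note on the event lines above: Neutron delivers network-vif-plugged-67156309-... while the instance is still building, nothing has registered a wait for it, so the manager logs "No waiting events found dispatching ..." followed by the WARNING about an unexpected event. The sketch below is a simplified, illustrative stand-in for that bookkeeping pattern (per-instance waiters popped on delivery); class and function names here are hypothetical and are not Nova's real internals.

# Illustrative sketch only: a minimal per-instance event registry mirroring the
# shape of the pop_instance_event / "unexpected event" behaviour in the log.
import threading
from collections import defaultdict


class InstanceEventRegistry:
    """Tracks externally delivered events (e.g. network-vif-plugged) per instance."""

    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare_for_event(self, instance_uuid, event_name):
        """Register a waiter before starting work that expects the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Return and remove the registered waiter, or None if nothing is waiting."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(registry, instance_uuid, event_name):
    waiter = registry.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to "No waiting events found dispatching ..." plus the WARNING.
        print("WARNING: received unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        waiter.set()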
[ 1276.803790] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Successfully updated port: 67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1276.861562] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524909a8-2cd5-6d99-091b-ecabc1df55c9, 'name': SearchDatastore_Task, 'duration_secs': 0.085354} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.861940] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.862303] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 64467948-35bb-4ad7-ac76-bbbd6f66e96f/64467948-35bb-4ad7-ac76-bbbd6f66e96f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1276.862587] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1cbf3c3-0cd2-412c-bfac-a579ea426272 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.875264] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1276.875264] env[68244]: value = "task-2781350" [ 1276.875264] env[68244]: _type = "Task" [ 1276.875264] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.887267] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.051672] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.206665] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ab4ef6-bbe6-4348-a6a8-bd25ab252249 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.230209] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd09c67-09f6-4b44-a856-29f7239bbf20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.240037] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.307565] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.307815] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.308189] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1277.385063] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.549821] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781349, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.40873} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.550121] env[68244]: INFO nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4/OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4.vmdk to [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk. 
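Editor's note on the disk-move lines above: the imported VMDK is promoted from its temporary OSTACK_IMG_* folder into devstack-image-cache_base/<image-id>/ via MoveVirtualDisk_Task, and the next records clean up the temporary location with DeleteDatastoreFile_Task. The following is a minimal sketch of driving those two vCenter tasks through oslo.vmware, assuming an already-created oslo_vmware.api.VMwareAPISession (`session`) and a datacenter managed-object reference (`dc_ref`); the datastore paths are copied from the log, everything else is illustrative.

def promote_imported_image(session, dc_ref):
    # Paths as they appear in the log records above.
    src = ('[datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4/'
           'OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4.vmdk')
    dst = ('[datastore2] devstack-image-cache_base/'
           'a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/'
           'a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk')

    # Move the imported disk into the image cache (MoveVirtualDisk_Task in the log).
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)  # polls until the task completes, as _poll_task does

    # Remove the temporary import folder (DeleteDatastoreFile_Task in the log).
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name='[datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4',
                              datacenter=dc_ref)
    session.wait_for_task(task)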
[ 1277.550302] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Cleaning up location [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4 {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1277.550467] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_fe53b852-481a-4dec-a5ba-94c3d2f80cb4 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1277.550745] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1787132a-23b3-4294-9020-26795776965e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.557981] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1277.557981] env[68244]: value = "task-2781351" [ 1277.557981] env[68244]: _type = "Task" [ 1277.557981] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.567829] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.747859] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1277.748242] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-add01f1b-dab8-42ff-b8cd-c317e7ad94d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.758276] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1277.758276] env[68244]: value = "task-2781352" [ 1277.758276] env[68244]: _type = "Task" [ 1277.758276] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.767009] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781352, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.839345] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1277.886544] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.912365} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.886755] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 64467948-35bb-4ad7-ac76-bbbd6f66e96f/64467948-35bb-4ad7-ac76-bbbd6f66e96f.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1277.886904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1277.887494] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef4e929d-d2d9-4896-8327-734209ac8fcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.894023] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1277.894023] env[68244]: value = "task-2781353" [ 1277.894023] env[68244]: _type = "Task" [ 1277.894023] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.903387] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781353, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.971355] env[68244]: DEBUG nova.network.neutron [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updating instance_info_cache with network_info: [{"id": "67156309-cd60-4569-ab05-05548717b11b", "address": "fa:16:3e:9b:86:bc", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67156309-cd", "ovs_interfaceid": "67156309-cd60-4569-ab05-05548717b11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.067953] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175052} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.068428] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1278.068619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.068869] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk to [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1278.069214] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f3d49ad-014a-477c-9aea-2574c74de623 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.076407] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1278.076407] env[68244]: value = "task-2781354" [ 1278.076407] env[68244]: _type = "Task" [ 1278.076407] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.083941] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.270504] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781352, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.406220] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067942} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.406495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1278.407385] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaeaf18c-7c6a-4696-9869-fed5dcf335c2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.432972] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 64467948-35bb-4ad7-ac76-bbbd6f66e96f/64467948-35bb-4ad7-ac76-bbbd6f66e96f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1278.433325] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-692d072f-8f53-4db5-8405-d04113b60027 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.454908] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1278.454908] env[68244]: value = "task-2781355" [ 1278.454908] env[68244]: _type = "Task" [ 1278.454908] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.466862] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781355, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.474688] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.475019] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Instance network_info: |[{"id": "67156309-cd60-4569-ab05-05548717b11b", "address": "fa:16:3e:9b:86:bc", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67156309-cd", "ovs_interfaceid": "67156309-cd60-4569-ab05-05548717b11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1278.475466] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:86:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd38fdec-d092-4a84-ab41-685f6dbb4f29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67156309-cd60-4569-ab05-05548717b11b', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1278.483669] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1278.483903] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1278.484158] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97850c46-5117-4cfd-bbac-75aa7f8c5f6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.506928] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1278.506928] env[68244]: value = "task-2781356" [ 1278.506928] env[68244]: _type = "Task" [ 1278.506928] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.518086] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781356, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.589234] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.732213] env[68244]: DEBUG nova.compute.manager [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Received event network-changed-67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1278.732546] env[68244]: DEBUG nova.compute.manager [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Refreshing instance network info cache due to event network-changed-67156309-cd60-4569-ab05-05548717b11b. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1278.732622] env[68244]: DEBUG oslo_concurrency.lockutils [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] Acquiring lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.732764] env[68244]: DEBUG oslo_concurrency.lockutils [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] Acquired lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.732966] env[68244]: DEBUG nova.network.neutron [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Refreshing network info cache for port 67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1278.772065] env[68244]: DEBUG oslo_vmware.api [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781352, 'name': PowerOnVM_Task, 'duration_secs': 0.882938} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.772065] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1278.772065] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-11458663-97a1-4b81-95dd-804d34028433 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance 'a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1278.967478] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.019488] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781356, 'name': CreateVM_Task} progress is 25%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.090593] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.448876] env[68244]: DEBUG nova.network.neutron [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updated VIF entry in instance network info cache for port 67156309-cd60-4569-ab05-05548717b11b. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1279.449371] env[68244]: DEBUG nova.network.neutron [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updating instance_info_cache with network_info: [{"id": "67156309-cd60-4569-ab05-05548717b11b", "address": "fa:16:3e:9b:86:bc", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67156309-cd", "ovs_interfaceid": "67156309-cd60-4569-ab05-05548717b11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.468606] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.519679] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781356, 'name': CreateVM_Task, 'duration_secs': 0.721609} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.519894] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1279.520707] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.520907] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.521261] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1279.521554] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86915b0a-b74b-4792-94da-4f3d92355aab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.529019] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1279.529019] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7e155-f0b3-73e3-cc66-14de3114a2b6" [ 1279.529019] env[68244]: _type = "Task" [ 1279.529019] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.538961] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7e155-f0b3-73e3-cc66-14de3114a2b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.591091] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.951901] env[68244]: DEBUG oslo_concurrency.lockutils [req-b5a96e05-224d-4276-a9b1-bba140cbf63b req-1d324c5e-160c-4277-a2f5-1bfbf985f4b4 service nova] Releasing lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.970363] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.041008] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f7e155-f0b3-73e3-cc66-14de3114a2b6, 'name': SearchDatastore_Task, 'duration_secs': 0.082434} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.041379] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.041621] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1280.041881] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.042028] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.042242] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.042523] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f77f85f-cef2-4e42-9236-33a5f9633686 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.061581] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.061994] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1280.062928] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a98af22b-7930-486d-a25b-81aaba26fe6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.071098] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1280.071098] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218606e-566e-2ec7-9443-757fd3384f23" [ 1280.071098] env[68244]: _type = "Task" [ 1280.071098] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.080453] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218606e-566e-2ec7-9443-757fd3384f23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.092281] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.465982] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781355, 'name': ReconfigVM_Task, 'duration_secs': 2.005304} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.466290] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 64467948-35bb-4ad7-ac76-bbbd6f66e96f/64467948-35bb-4ad7-ac76-bbbd6f66e96f.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1280.466929] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40d99ea7-3e39-4454-b4b5-e23c958f83df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.472832] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1280.472832] env[68244]: value = "task-2781357" [ 1280.472832] env[68244]: _type = "Task" [ 1280.472832] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.480895] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781357, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.582427] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5218606e-566e-2ec7-9443-757fd3384f23, 'name': SearchDatastore_Task, 'duration_secs': 0.084188} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.585943] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14f5171e-8adf-4680-b767-69c646265eb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.593629] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781354, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.245678} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.594354] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee/a77776b8-a4f1-499e-9cc0-b9dcbb5673ee.vmdk to [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1280.594705] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1280.594705] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5212c602-5864-a7fa-84ea-e439b568b967" [ 1280.594705] env[68244]: _type = "Task" [ 1280.594705] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.595392] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9c8ad3-a2a6-46ab-b817-97a5024f4c22 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.604877] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5212c602-5864-a7fa-84ea-e439b568b967, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.622025] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1280.622264] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-818d8371-bed9-474e-b495-466f6f689912 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.640945] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1280.640945] env[68244]: value = "task-2781358" [ 1280.640945] env[68244]: _type = "Task" [ 1280.640945] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.648711] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781358, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.983011] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781357, 'name': Rename_Task, 'duration_secs': 0.160152} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.983330] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1280.983569] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bed99fd-d73d-48ae-94cf-c432b17b55d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.989369] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1280.989369] env[68244]: value = "task-2781359" [ 1280.989369] env[68244]: _type = "Task" [ 1280.989369] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.996432] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.004973] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.005221] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.005404] env[68244]: DEBUG nova.compute.manager [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Going to confirm migration 7 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1281.107624] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5212c602-5864-a7fa-84ea-e439b568b967, 'name': SearchDatastore_Task, 'duration_secs': 0.014659} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.107985] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.108164] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 256a0329-07b6-4bc2-a574-6e5a108d301a/256a0329-07b6-4bc2-a574-6e5a108d301a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1281.108428] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71e5bdf6-b392-460b-9e04-d43914518856 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.116515] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1281.116515] env[68244]: value = "task-2781360" [ 1281.116515] env[68244]: _type = "Task" [ 1281.116515] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.123559] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.149659] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781358, 'name': ReconfigVM_Task, 'duration_secs': 0.262676} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.149968] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a/f1143201-5ee1-45be-b2b1-4314a26aa10a.vmdk or device None with type streamOptimized {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1281.150600] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8cdb0472-c5c4-4675-a2a1-881f46979dd4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.155933] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1281.155933] env[68244]: value = "task-2781361" [ 1281.155933] env[68244]: _type = "Task" [ 1281.155933] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.163632] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781361, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.500256] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781359, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.579218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.579416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.579603] env[68244]: DEBUG nova.network.neutron [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.579831] env[68244]: DEBUG nova.objects.instance [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'info_cache' on Instance uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1281.627235] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781360, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.665313] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781361, 'name': Rename_Task, 'duration_secs': 0.166102} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.665702] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1281.665891] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e2e874f-d358-4601-9e72-f901b2a3643d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.671870] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1281.671870] env[68244]: value = "task-2781362" [ 1281.671870] env[68244]: _type = "Task" [ 1281.671870] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.679775] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781362, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.000116] env[68244]: DEBUG oslo_vmware.api [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781359, 'name': PowerOnVM_Task, 'duration_secs': 0.670573} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.000379] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1282.000585] env[68244]: INFO nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1282.000766] env[68244]: DEBUG nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1282.001630] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a46b3b2-5fc8-458e-8f35-779ce5bb5ed3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.127569] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53797} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.128127] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 256a0329-07b6-4bc2-a574-6e5a108d301a/256a0329-07b6-4bc2-a574-6e5a108d301a.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1282.128127] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1282.128257] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dd6b6fd-cc4e-42b2-890d-04441f1449ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.134881] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1282.134881] env[68244]: value = "task-2781363" [ 1282.134881] env[68244]: _type = "Task" [ 1282.134881] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.143015] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.180848] env[68244]: DEBUG oslo_vmware.api [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781362, 'name': PowerOnVM_Task, 'duration_secs': 0.459845} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.181110] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1282.291207] env[68244]: DEBUG nova.compute.manager [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1282.292356] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759853d1-f94b-4816-b99a-bc8106bbcb0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.518221] env[68244]: INFO nova.compute.manager [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Took 18.18 seconds to build instance. [ 1282.644185] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170417} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.644436] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1282.645248] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0d0dea-4f78-402c-8371-19cda0f1e32b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.666689] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 256a0329-07b6-4bc2-a574-6e5a108d301a/256a0329-07b6-4bc2-a574-6e5a108d301a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1282.669364] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e1068b4-0125-4b53-8878-f56c4a1f202e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.689298] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1282.689298] env[68244]: value = "task-2781364" [ 1282.689298] env[68244]: _type = "Task" 
[ 1282.689298] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.696866] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781364, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.809039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea426d8f-eb1d-49bb-8d14-57c5a15725aa tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.386s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.837485] env[68244]: DEBUG nova.network.neutron [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [{"id": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "address": "fa:16:3e:21:1e:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0588ac9-a7", "ovs_interfaceid": "a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.020840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-badf48bb-2fa4-4083-a1f6-ab92fb1d6a3f tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.686s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.086185] env[68244]: DEBUG nova.compute.manager [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1283.086500] env[68244]: DEBUG nova.compute.manager 
[req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1283.087902] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.087902] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.087902] env[68244]: DEBUG nova.network.neutron [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1283.199623] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781364, 'name': ReconfigVM_Task, 'duration_secs': 0.478639} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.199942] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 256a0329-07b6-4bc2-a574-6e5a108d301a/256a0329-07b6-4bc2-a574-6e5a108d301a.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1283.200795] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1af41cc9-9e85-4c74-ae0b-2b1f5e2972b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.206864] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1283.206864] env[68244]: value = "task-2781365" [ 1283.206864] env[68244]: _type = "Task" [ 1283.206864] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.215242] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781365, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.340330] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.340720] env[68244]: DEBUG nova.objects.instance [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'migration_context' on Instance uuid a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.717342] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781365, 'name': Rename_Task, 'duration_secs': 0.200304} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.717642] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1283.717888] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-763f61c1-13f2-4e03-8591-bd14c7c2e4de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.724672] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1283.724672] env[68244]: value = "task-2781366" [ 1283.724672] env[68244]: _type = "Task" [ 1283.724672] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.733329] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.792653] env[68244]: DEBUG nova.network.neutron [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894. 
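The Rename_Task and PowerOnVM_Task entries nearby all follow the same shape: submit a vCenter task, then poll it until it reports success ("Waiting for the task ... to complete", "progress is N%", "... completed successfully"). Below is a minimal, self-contained sketch of that poll-until-done loop; `get_task_info` is a hypothetical callable standing in for a real vSphere task query, not the oslo.vmware session API.

```python
import time

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it reports success; raise on error or timeout.

    `get_task_info` is a hypothetical callable returning a dict with
    'state' ('running' / 'success' / 'error') and 'progress'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(f"task failed: {info.get('error')}")
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"task did not complete within {timeout}s")

# Example: a fake task that succeeds on the third poll.
_polls = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 50},
               {"state": "success", "progress": 100}])
print(wait_for_task(lambda: next(_polls), interval=0.01))
```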
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.793100] env[68244]: DEBUG nova.network.neutron [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.845867] env[68244]: DEBUG nova.objects.base [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1283.846821] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322fc591-768e-4e3d-a636-570d00e4d498 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.868180] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b1c69d5-c85e-4d5a-88ef-eb3afc14b317 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.873920] env[68244]: DEBUG oslo_vmware.api [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1283.873920] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52716216-123e-6f28-31e5-ddeae6c81323" [ 1283.873920] env[68244]: _type = "Task" [ 1283.873920] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.881693] env[68244]: DEBUG oslo_vmware.api [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52716216-123e-6f28-31e5-ddeae6c81323, 'name': SearchDatastore_Task} progress is 0%. 
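The instance_info_cache entries above carry the full network_info structure for each VIF (port id, MAC, fixed IPs and any floating IPs). The short sketch below pulls those addresses out of such a structure; the sample data is trimmed from the cache entry above and keeps only the fields the helper reads.

```python
# Trimmed from the instance_info_cache entry above; only the fields the
# helper reads are kept.
network_info = [{
    "id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894",
    "address": "fa:16:3e:0d:51:d6",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.4",
                "floating_ips": [{"address": "10.180.180.180"}],
            }],
        }],
    },
}]

def summarize_vifs(network_info):
    """Return (port_id, mac, fixed_ips, floating_ips) for each VIF."""
    out = []
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        out.append((vif["id"], vif["address"], fixed, floating))
    return out

print(summarize_vifs(network_info))
# [('c38bbec1-...', 'fa:16:3e:0d:51:d6', ['192.168.128.4'], ['10.180.180.180'])]
```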
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.974938] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.975271] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.235082] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781366, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.295460] env[68244]: DEBUG oslo_concurrency.lockutils [req-2ff536ba-6d66-492e-bff6-1a89d8938d42 req-734676f5-13ad-495e-b16b-559a8c509944 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.384645] env[68244]: DEBUG oslo_vmware.api [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52716216-123e-6f28-31e5-ddeae6c81323, 'name': SearchDatastore_Task, 'duration_secs': 0.006661} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.384943] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.385211] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.477433] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Starting instance... 
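The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns" and "Lock ... released ... :: held Ns" lines come from oslo.concurrency's lock wrapper (the `inner` frames in lockutils.py). The sketch below serializes a critical section the same way, assuming oslo.concurrency is installed; the lock name mirrors the "compute_resources" lock seen here, but the decorated function is illustrative, not Nova code.

```python
from oslo_concurrency import lockutils

# Illustrative only: the lock name mirrors the "compute_resources" lock seen
# in the log, but the decorated function is a stand-in, not Nova code.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the named (process-local) lock held; concurrent callers block,
    # and with debug logging enabled the wrapper emits acquire/"waited"/"held"
    # lines like the ones above.
    print(f"claiming resources for {instance_uuid}")

claim_resources("0c949d23-d98f-47d2-9f3c-d520df035d55")
```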
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1284.736803] env[68244]: DEBUG oslo_vmware.api [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781366, 'name': PowerOnVM_Task, 'duration_secs': 0.517697} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.737153] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1284.737402] env[68244]: INFO nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Took 8.51 seconds to spawn the instance on the hypervisor. [ 1284.737598] env[68244]: DEBUG nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1284.738482] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde08331-76b8-443a-9974-8079797db574 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.998245] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.018518] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0095edd5-cf0a-4266-9921-46ba44a66cc6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.026526] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a6dfdc-cc0f-41ec-b271-749894d56083 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.058439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6225acb4-df2e-4592-b905-5e52568ab4a8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.065612] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bc5c74-0b25-4899-9441-5bd93ecfb3e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.078721] env[68244]: DEBUG nova.compute.provider_tree [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider 
b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.255792] env[68244]: INFO nova.compute.manager [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Took 13.34 seconds to build instance. [ 1285.598925] env[68244]: ERROR nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [req-a6f12385-f35e-4af4-9e54-38c0b6f63d7f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a6f12385-f35e-4af4-9e54-38c0b6f63d7f"}]} [ 1285.614678] env[68244]: DEBUG nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1285.627181] env[68244]: DEBUG nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1285.627410] env[68244]: DEBUG nova.compute.provider_tree [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.637750] env[68244]: DEBUG nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1285.655657] env[68244]: DEBUG nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1285.758110] env[68244]: DEBUG oslo_concurrency.lockutils [None req-a1eb4a4a-c283-4032-ad7c-0c62e7ef5112 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.847s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.762061] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5170bb-77bb-4ba5-894c-d90396453a5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.769309] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dc036a-0326-4bc1-bd63-63c80ab89d0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.799214] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bf2a43-6016-4fd0-8c57-9274b29367c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.806569] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74c3f31-2c4c-4415-9f81-0e9aa97b6ca2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.820819] env[68244]: DEBUG nova.compute.provider_tree [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1286.353995] env[68244]: DEBUG nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updated inventory 
for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1286.354393] env[68244]: DEBUG nova.compute.provider_tree [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 166 to 167 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1286.354553] env[68244]: DEBUG nova.compute.provider_tree [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1286.571563] env[68244]: DEBUG nova.compute.manager [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Received event network-changed-67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1286.571717] env[68244]: DEBUG nova.compute.manager [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Refreshing instance network info cache due to event network-changed-67156309-cd60-4569-ab05-05548717b11b. 
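The ERROR above shows a PUT of provider inventory rejected with 409 placement.concurrent_update because the resource provider generation had moved; the client then refreshes inventories (picking up the current generation) and retries, after which the local generation is bumped from 166 to 167. Below is a rough sketch of that read-modify-write loop against the Placement API using requests; the endpoint path and payload shape follow the Placement REST API, but the base URL, token and retry policy are placeholders and this is not Nova's report client.

```python
import requests

PLACEMENT = "http://placement.example.test"   # assumed base URL
HEADERS = {"X-Auth-Token": "REPLACE_ME"}      # placeholder auth

def set_inventory(rp_uuid, inventories, retries=3):
    """PUT inventories, re-reading the provider generation on 409 conflicts."""
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(retries):
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop to fetch the new one and try again.
    raise RuntimeError(f"generation conflict persisted for {rp_uuid}")
```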
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1286.572052] env[68244]: DEBUG oslo_concurrency.lockutils [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] Acquiring lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.572186] env[68244]: DEBUG oslo_concurrency.lockutils [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] Acquired lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.572355] env[68244]: DEBUG nova.network.neutron [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Refreshing network info cache for port 67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1287.290466] env[68244]: DEBUG nova.network.neutron [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updated VIF entry in instance network info cache for port 67156309-cd60-4569-ab05-05548717b11b. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1287.290852] env[68244]: DEBUG nova.network.neutron [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updating instance_info_cache with network_info: [{"id": "67156309-cd60-4569-ab05-05548717b11b", "address": "fa:16:3e:9b:86:bc", "network": {"id": "7efd4a84-009b-489c-9f07-6c0d957a304d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1830805074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4349b19805a8498392649e1b825d5da7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd38fdec-d092-4a84-ab41-685f6dbb4f29", "external-id": "nsx-vlan-transportzone-622", "segmentation_id": 622, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67156309-cd", "ovs_interfaceid": "67156309-cd60-4569-ab05-05548717b11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.365774] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.980s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.370009] env[68244]: 
DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.372s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.372483] env[68244]: INFO nova.compute.claims [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1287.794414] env[68244]: DEBUG oslo_concurrency.lockutils [req-8227dea3-d934-4b00-9f51-172ee6a690ef req-e58d6bf5-17ae-4332-8d25-e777edd93064 service nova] Releasing lock "refresh_cache-256a0329-07b6-4bc2-a574-6e5a108d301a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.933759] env[68244]: INFO nova.scheduler.client.report [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted allocation for migration 741481c1-9553-416e-9f78-9b4461def4fc [ 1288.216648] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.216915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.217157] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.217377] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.217564] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.219673] env[68244]: INFO nova.compute.manager [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Terminating instance [ 1288.439209] env[68244]: DEBUG oslo_concurrency.lockutils [None req-d5e0e9fa-2cd4-4b4e-91c5-65c04c2cda8a tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.434s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.498371] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3e8cb2-0587-43b1-8d58-62a67dce759b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.505399] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e4f39c-e63d-4686-9e4a-dce0e844fa53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.535806] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3e25f6-1399-4ac5-abd6-bbca816b195d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.542722] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deae7941-092d-4803-b5b0-9955b5b864a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.555814] env[68244]: DEBUG nova.compute.provider_tree [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.723828] env[68244]: DEBUG nova.compute.manager [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1288.724094] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.724980] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbb8629-f08b-4b0c-b383-62cb1a796415 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.732810] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.733085] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a71a4b9f-7250-4944-84c7-520339a826fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.738705] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1288.738705] env[68244]: value = "task-2781367" [ 1288.738705] env[68244]: _type = "Task" [ 1288.738705] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.746942] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.058721] env[68244]: DEBUG nova.scheduler.client.report [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1289.249687] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781367, 'name': PowerOffVM_Task, 'duration_secs': 0.226416} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.249954] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1289.250140] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1289.250397] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-262afc22-23ea-4cdf-977b-864b570b5d18 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.293755] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.294022] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.294251] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.294426] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.294596] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.296872] env[68244]: INFO nova.compute.manager [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Terminating instance [ 1289.340544] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1289.340747] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1289.340928] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleting the datastore file [datastore2] dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1289.341230] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca955ee9-7532-49f8-904d-7b501f64a9d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.347787] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1289.347787] env[68244]: value = "task-2781369" [ 1289.347787] env[68244]: _type = "Task" [ 1289.347787] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.355773] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781369, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.563938] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.564505] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1289.800779] env[68244]: DEBUG nova.compute.manager [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1289.801052] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1289.801965] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b041e7bd-1bd5-4d98-b14c-f9123be09671 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.809545] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.809796] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2071688c-3f68-40db-9683-eede577bd8f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.815324] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1289.815324] env[68244]: value = "task-2781370" [ 1289.815324] env[68244]: _type = "Task" [ 1289.815324] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.823378] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.858127] env[68244]: DEBUG oslo_vmware.api [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165422} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.858403] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.858583] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.858765] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.858943] env[68244]: INFO nova.compute.manager [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1289.859220] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1289.859413] env[68244]: DEBUG nova.compute.manager [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1289.859509] env[68244]: DEBUG nova.network.neutron [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1290.069862] env[68244]: DEBUG nova.compute.utils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1290.071252] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Allocating IP information in the background. 
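The terminate of instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a recorded above is a fixed sequence: power the VM off (PowerOffVM_Task), unregister it, delete its directory on the datastore (DeleteDatastoreFile_Task), then deallocate its Neutron ports. The sketch below mirrors only that ordering; every helper name is a hypothetical stand-in for the real driver or Neutron call.

```python
# Every helper name here is a hypothetical stand-in; only the ordering is
# taken from the log entries above.
def terminate_instance(vm, power_off, unregister, delete_datastore_dir,
                       deallocate_network):
    """Mirror the order of operations the log shows for a terminate."""
    power_off(vm)             # PowerOffVM_Task, then wait for completion
    unregister(vm)            # UnregisterVM
    delete_datastore_dir(vm)  # DeleteDatastoreFile_Task for [datastore2] <uuid>
    deallocate_network(vm)    # release the instance's Neutron ports

# No-op callables, just to show the call order.
terminate_instance(
    "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a",
    power_off=lambda vm: print("power off", vm),
    unregister=lambda vm: print("unregister", vm),
    delete_datastore_dir=lambda vm: print("delete datastore files for", vm),
    deallocate_network=lambda vm: print("deallocate network for", vm),
)
```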
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1290.071443] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1290.128741] env[68244]: DEBUG nova.policy [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1290.326953] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781370, 'name': PowerOffVM_Task, 'duration_secs': 0.41363} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.328144] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1290.328144] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1290.328144] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3cde311-3b16-40e7-b31d-a7504cd41681 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.359060] env[68244]: DEBUG nova.compute.manager [req-1b864ed2-249c-402a-9487-0db1f701bb7a req-cd05e51b-87bd-40f3-9132-555e46873765 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Received event network-vif-deleted-e73356d6-fa2a-49f0-b862-b5f1644c7579 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1290.359261] env[68244]: INFO nova.compute.manager [req-1b864ed2-249c-402a-9487-0db1f701bb7a req-cd05e51b-87bd-40f3-9132-555e46873765 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Neutron deleted interface e73356d6-fa2a-49f0-b862-b5f1644c7579; detaching it from the instance and deleting it from the info cache [ 1290.359609] env[68244]: DEBUG nova.network.neutron [req-1b864ed2-249c-402a-9487-0db1f701bb7a req-cd05e51b-87bd-40f3-9132-555e46873765 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1290.411011] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Successfully created port: 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1290.424592] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1290.424592] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1290.424592] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleting the datastore file [datastore2] a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1290.424828] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9570eb3-4c13-435e-b538-8b9d4e41eb2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.431871] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1290.431871] env[68244]: value = "task-2781372" [ 1290.431871] env[68244]: _type = "Task" [ 1290.431871] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.440643] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781372, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.574612] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Start building block device mappings for instance. 
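The preceding entries show the wait_for_task pattern that recurs throughout this log: an asynchronous vCenter operation (here FileManager.DeleteDatastoreFile_Task) returns a task handle, and the driver polls it until it completes. A minimal sketch of that poll-until-done loop, using a hypothetical fetch_task_state() stand-in rather than the real oslo.vmware session API:

import time

TASK_POLL_INTERVAL = 0.5   # assumed polling interval in seconds

def fetch_task_state(task_id):
    """Hypothetical stand-in for querying vCenter for task state and progress."""
    raise NotImplementedError

def wait_for_task(task_id, timeout=300):
    """Poll a task until it reaches a terminal state or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = fetch_task_state(task_id)
        if state == "error":
            raise RuntimeError(f"task {task_id} failed")
        if state == "success":
            return state
        # Corresponds to the "... progress is 0%" entries above.
        print(f"task {task_id} progress is {progress}%")
        time.sleep(TASK_POLL_INTERVAL)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")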
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1290.835691] env[68244]: DEBUG nova.network.neutron [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.862408] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d1784ba-a4df-45ab-b011-0e653726a7b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.873880] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfc8131-67b4-41b2-b1ee-304699f11a3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.903971] env[68244]: DEBUG nova.compute.manager [req-1b864ed2-249c-402a-9487-0db1f701bb7a req-cd05e51b-87bd-40f3-9132-555e46873765 service nova] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Detach interface failed, port_id=e73356d6-fa2a-49f0-b862-b5f1644c7579, reason: Instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1290.941448] env[68244]: DEBUG oslo_vmware.api [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147259} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.941709] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1290.941879] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1290.942114] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1290.942304] env[68244]: INFO nova.compute.manager [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1290.942539] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1290.942732] env[68244]: DEBUG nova.compute.manager [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1290.942831] env[68244]: DEBUG nova.network.neutron [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1291.337751] env[68244]: INFO nova.compute.manager [-] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Took 1.48 seconds to deallocate network for instance. [ 1291.584627] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1291.611913] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1291.612221] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1291.612381] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1291.612561] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1291.612707] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1291.612852] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd 
tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1291.613074] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1291.613239] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1291.613405] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1291.613568] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1291.613738] env[68244]: DEBUG nova.virt.hardware [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1291.614652] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ea1cc8-ab34-4990-96aa-2e2d35c3292f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.622933] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa8b8d1-fc97-4909-b399-08f36f525aef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.788470] env[68244]: DEBUG nova.compute.manager [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-vif-plugged-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1291.788822] env[68244]: DEBUG oslo_concurrency.lockutils [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.788915] env[68244]: DEBUG oslo_concurrency.lockutils [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] Lock 
"0c949d23-d98f-47d2-9f3c-d520df035d55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.789187] env[68244]: DEBUG oslo_concurrency.lockutils [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.789452] env[68244]: DEBUG nova.compute.manager [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] No waiting events found dispatching network-vif-plugged-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1291.789667] env[68244]: WARNING nova.compute.manager [req-58b45772-f89d-424f-8474-f935e95e000c req-25a321ff-e48f-4e5a-a5ff-a5cabc908f11 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received unexpected event network-vif-plugged-7ddc5996-0e46-45a6-996d-9bad1b9c3955 for instance with vm_state building and task_state spawning. [ 1291.845976] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.846297] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.846637] env[68244]: DEBUG nova.objects.instance [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'resources' on Instance uuid dfe017bb-d860-4da6-abe5-7e8d7a7dd05a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.870195] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Successfully updated port: 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1291.910410] env[68244]: DEBUG nova.network.neutron [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.374028] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.374140] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1292.374363] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1292.388577] env[68244]: DEBUG nova.compute.manager [req-856e9b7c-7570-40f0-866e-494bb40f03a3 req-b6d14a9e-1808-4e94-bec8-93a04812ef2d service nova] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Received event network-vif-deleted-a0588ac9-a70d-4bc2-ad06-9dba2d2aa43e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1292.412556] env[68244]: INFO nova.compute.manager [-] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Took 1.47 seconds to deallocate network for instance. [ 1292.476580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd85362-ba47-45bf-9754-341b991c3848 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.484266] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d59f40-d2f1-4e06-9bf3-ce42893d920c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.513597] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb134e78-1ddd-4049-baa1-a9918ca5756d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.521100] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6106c840-f3f0-45d8-93d9-f8860cca8619 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.534503] env[68244]: DEBUG nova.compute.provider_tree [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1292.906349] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Instance cache missing network info. 
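The nova.virt.hardware entries a few lines up trace CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, exactly one topology is possible, and VirtCPUTopology(cores=1,sockets=1,threads=1) is chosen. A simplified illustration of that enumeration (not Nova's actual implementation) that lists every sockets*cores*threads factorization of the vCPU count within the limits:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor above only one topology exists, matching the log:
print(possible_topologies(1))   # [(1, 1, 1)]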
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1292.918688] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.032153] env[68244]: DEBUG nova.network.neutron [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.054715] env[68244]: ERROR nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [req-372065df-f03f-4ecb-9bdc-b310e9086dba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-372065df-f03f-4ecb-9bdc-b310e9086dba"}]} [ 1293.070985] env[68244]: DEBUG nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1293.083696] env[68244]: DEBUG nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1293.083902] env[68244]: DEBUG nova.compute.provider_tree [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1293.094091] env[68244]: DEBUG nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1293.110535] env[68244]: DEBUG nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1293.213832] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9825e8-c394-4691-9ee6-bd1ed23516bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.221480] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1a80be-aa89-441d-ba7d-76ddbcdb79c0 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.252252] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51faae06-23c3-404a-b1d9-9f519638c168 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.259152] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149362d7-1443-4e70-ac4e-6dd29a1c8b61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.272072] env[68244]: DEBUG nova.compute.provider_tree [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1293.535350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.535661] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Instance network_info: |[{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1293.536127] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:e7:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ddc5996-0e46-45a6-996d-9bad1b9c3955', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1293.543636] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1293.544569] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1293.544820] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c34a294-9bdd-4a0b-b605-37bd0f7b0b24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.564795] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1293.564795] env[68244]: value = "task-2781373" [ 1293.564795] env[68244]: _type = "Task" [ 1293.564795] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.571975] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781373, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.803416] env[68244]: DEBUG nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 168 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1293.803699] env[68244]: DEBUG nova.compute.provider_tree [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 168 to 169 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1293.803880] env[68244]: DEBUG nova.compute.provider_tree [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1293.816494] env[68244]: DEBUG nova.compute.manager [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1293.816768] env[68244]: DEBUG nova.compute.manager [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing instance network info cache due to event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1293.816965] env[68244]: DEBUG oslo_concurrency.lockutils [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.817151] env[68244]: DEBUG oslo_concurrency.lockutils [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.817367] env[68244]: DEBUG nova.network.neutron [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1294.074900] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781373, 'name': CreateVM_Task, 'duration_secs': 0.292565} completed successfully. 
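The scheduler report client entries above show an inventory update rejected with HTTP 409 placement.concurrent_update because the cached resource provider generation was stale, followed by a refresh and a successful retry (generation 168 to 169). A rough sketch of that read-refresh-retry loop against the Placement API, assuming a pre-authenticated requests.Session and an example endpoint URL; only the generation-conflict case is handled:

import requests

PLACEMENT_URL = "https://placement.example.test"   # assumed endpoint
session = requests.Session()                        # assumed to carry auth headers

def set_inventory(provider_uuid, inventories, max_retries=3):
    """PUT inventories, re-reading the provider generation after 409 conflicts."""
    url = f"{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories"
    for _ in range(max_retries):
        generation = session.get(url).json()["resource_provider_generation"]
        resp = session.put(url, json={
            "resource_provider_generation": generation,
            "inventories": inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Another writer bumped the generation; refresh and retry, as the
        # "Refreshing inventories for resource provider ..." entries do above.
    raise RuntimeError(f"inventory update for {provider_uuid} kept conflicting")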
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.075284] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1294.075775] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.075940] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.076268] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1294.076519] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b50f915b-fa1b-410b-9ab7-b2596391d4bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.081704] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1294.081704] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d0191-41bf-aa76-593b-d3345c5cd562" [ 1294.081704] env[68244]: _type = "Task" [ 1294.081704] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.089049] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d0191-41bf-aa76-593b-d3345c5cd562, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.309840] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.463s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.311645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.393s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.311844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.325881] env[68244]: INFO nova.scheduler.client.report [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleted allocations for instance dfe017bb-d860-4da6-abe5-7e8d7a7dd05a [ 1294.327450] env[68244]: INFO nova.scheduler.client.report [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted allocations for instance a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1 [ 1294.515957] env[68244]: DEBUG nova.network.neutron [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updated VIF entry in instance network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1294.516380] env[68244]: DEBUG nova.network.neutron [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.593521] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d0191-41bf-aa76-593b-d3345c5cd562, 'name': SearchDatastore_Task, 'duration_secs': 0.011421} completed successfully. 
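The instance_info_cache entry above carries the full per-VIF network_info structure for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955: port id, MAC address, subnets with fixed IPs, and the OVS/NSX binding details. Nova wraps this in its own NetworkInfo/VIF model objects; purely to make the data layout explicit, a small sketch that pulls the commonly used fields out of one such entry with plain dict access:

def summarize_vif(vif):
    """Return (port_id, mac, fixed_ips, mtu) for one cached network_info entry."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return vif["id"], vif["address"], fixed_ips, vif["network"]["meta"].get("mtu")

# Applied to the entry logged above this yields:
# ('7ddc5996-0e46-45a6-996d-9bad1b9c3955', 'fa:16:3e:65:e7:4f', ['192.168.128.10'], 8950)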
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.593785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.594030] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1294.594303] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.594408] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1294.594585] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1294.594854] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d616d237-92e8-42ff-be93-0845902b4acf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.603502] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1294.603678] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1294.604414] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a27e23-0e10-4383-8a70-d90fe0671035 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.609440] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1294.609440] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52420aa3-fc05-6d84-4b46-f61fada59dab" [ 1294.609440] env[68244]: _type = "Task" [ 1294.609440] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.617013] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52420aa3-fc05-6d84-4b46-f61fada59dab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.836990] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ec4efafd-2c00-4da0-a46e-6c045eb5f2a9 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "dfe017bb-d860-4da6-abe5-7e8d7a7dd05a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.620s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.837989] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1a6e3e2a-b189-43b3-8c7d-90e99b070af9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.544s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.019096] env[68244]: DEBUG oslo_concurrency.lockutils [req-76084d95-1ff2-47a4-acf7-2568abba1d50 req-6151584d-89a0-4da8-b08f-adf7f47025b3 service nova] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.120685] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52420aa3-fc05-6d84-4b46-f61fada59dab, 'name': SearchDatastore_Task, 'duration_secs': 0.00839} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.121402] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61ed5568-28d3-411e-990a-f92f8251f527 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.126157] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1295.126157] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d5808f-af3f-f2c6-fadb-a378e6dca632" [ 1295.126157] env[68244]: _type = "Task" [ 1295.126157] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.133197] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d5808f-af3f-f2c6-fadb-a378e6dca632, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.333542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.333793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.636652] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d5808f-af3f-f2c6-fadb-a378e6dca632, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.637378] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.637378] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0c949d23-d98f-47d2-9f3c-d520df035d55/0c949d23-d98f-47d2-9f3c-d520df035d55.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1295.637557] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-919eca97-6c7a-4cc9-bd47-358900f58376 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.644718] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1295.644718] env[68244]: value = "task-2781374" [ 1295.644718] env[68244]: _type = "Task" [ 1295.644718] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.652043] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781374, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.835844] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1296.154767] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781374, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458815} completed successfully. 
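The copy step above moves the cached image disk into the instance directory using bracketed datastore paths: [datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk becomes [datastore2] <instance-uuid>/<instance-uuid>.vmdk. A tiny helper that builds strings of that shape, only to make the path convention explicit (the driver itself uses a dedicated DatastorePath helper rather than raw strings):

def ds_path(datastore, *parts):
    """Build a "[datastore] a/b/c"-style datastore path string."""
    return f"[{datastore}] " + "/".join(parts)

image_id = "9aa0b4d1-af1b-4141-9ca6-95525b722d7e"
instance_uuid = "0c949d23-d98f-47d2-9f3c-d520df035d55"

source = ds_path("datastore2", "devstack-image-cache_base", image_id, f"{image_id}.vmdk")
target = ds_path("datastore2", instance_uuid, f"{instance_uuid}.vmdk")
print(source)
print(target)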
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.155090] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 0c949d23-d98f-47d2-9f3c-d520df035d55/0c949d23-d98f-47d2-9f3c-d520df035d55.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1296.155551] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1296.155815] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39e5ff2f-2ec0-4f36-9b0e-b782cfe27c09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.162441] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1296.162441] env[68244]: value = "task-2781375" [ 1296.162441] env[68244]: _type = "Task" [ 1296.162441] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.171543] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.359734] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.360294] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.361931] env[68244]: INFO nova.compute.claims [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1296.672452] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067044} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.672725] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1296.673527] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03559b6c-7d07-4125-883b-9263d1f52982 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.695253] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 0c949d23-d98f-47d2-9f3c-d520df035d55/0c949d23-d98f-47d2-9f3c-d520df035d55.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1296.695638] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faeb692b-1c64-443a-b030-7d4fdb3a3f6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.714926] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1296.714926] env[68244]: value = "task-2781376" [ 1296.714926] env[68244]: _type = "Task" [ 1296.714926] env[68244]: } to complete. 
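The oslo_concurrency.lockutils entries throughout this span ("Acquiring lock ... by ...", "acquired ... waited", "released ... held") come from its named-lock decorator and context manager. A minimal sketch of guarding a critical section the same way, assuming oslo.concurrency is available (it is part of the stack being logged); the lock name mirrors the compute_resources lock taken for the resource claim above:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    """Runs with the "compute_resources" lock held, as in the entries above."""
    print(f"claiming resources for {instance_uuid}")

def claim_resources_inline(instance_uuid):
    # Equivalent ad-hoc form using the context manager.
    with lockutils.lock("compute_resources"):
        print(f"claiming resources for {instance_uuid}")

claim_resources("ae4d8900-3185-4747-ba8d-fe334d9e3237")
claim_resources_inline("ae4d8900-3185-4747-ba8d-fe334d9e3237")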
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.722070] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.217846] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.218192] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.229107] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781376, 'name': ReconfigVM_Task, 'duration_secs': 0.31771} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.229855] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 0c949d23-d98f-47d2-9f3c-d520df035d55/0c949d23-d98f-47d2-9f3c-d520df035d55.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1297.230539] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-958d986d-3f92-4d8a-8681-ef3749350924 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.238631] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1297.238631] env[68244]: value = "task-2781377" [ 1297.238631] env[68244]: _type = "Task" [ 1297.238631] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.246995] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781377, 'name': Rename_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.476955] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111916ce-4fc7-4730-84bb-57532bf0870e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.484548] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d57b01-042e-4639-aeba-065bef070bbb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.177384] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1298.183851] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c122d5d-9f8f-4b3d-9031-2fff79379eb2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.191470] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781377, 'name': Rename_Task, 'duration_secs': 0.145203} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.193464] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1298.193707] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6668dfc3-da7e-4177-a6d0-fd82427dda48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.195892] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cde3226-8dc5-40f7-a22c-7560112b3854 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.208876] env[68244]: DEBUG nova.compute.provider_tree [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1298.211012] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 
1298.211012] env[68244]: value = "task-2781378" [ 1298.211012] env[68244]: _type = "Task" [ 1298.211012] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.218548] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781378, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.697298] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.723078] env[68244]: DEBUG oslo_vmware.api [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781378, 'name': PowerOnVM_Task, 'duration_secs': 0.459794} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.723376] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1298.723582] env[68244]: INFO nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Took 7.14 seconds to spawn the instance on the hypervisor. [ 1298.723757] env[68244]: DEBUG nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1298.724684] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4672274-442b-46cc-a8cc-f3f1fb58ab3c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.728993] env[68244]: ERROR nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [req-eb56c86f-0e3d-465a-a9ac-c2f7b9808664] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eb56c86f-0e3d-465a-a9ac-c2f7b9808664"}]} [ 1298.744258] env[68244]: DEBUG nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1298.760419] env[68244]: DEBUG nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1298.760699] env[68244]: DEBUG nova.compute.provider_tree [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1298.772389] env[68244]: DEBUG nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1298.790450] env[68244]: DEBUG nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1298.886671] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520f8663-e0a0-4670-b3f8-effe31b385d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.894044] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e96d67-ea21-4702-ab29-7d623d44ae35 {{(pid=68244) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.924027] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdedb5e-b083-4452-99fe-4e0883183485 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.930930] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11350e2b-5240-4f69-8c34-64ad5cd97a3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.943619] env[68244]: DEBUG nova.compute.provider_tree [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1299.245391] env[68244]: INFO nova.compute.manager [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Took 14.26 seconds to build instance. [ 1299.473304] env[68244]: DEBUG nova.scheduler.client.report [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1299.473576] env[68244]: DEBUG nova.compute.provider_tree [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 171 to 172 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1299.473752] env[68244]: DEBUG nova.compute.provider_tree [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1299.747915] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0c3fdf6d-d1c4-4bcb-a9a1-0cdc547c46bd tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.772s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.979060] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.619s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.979664] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1299.982590] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.285s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.984171] env[68244]: INFO nova.compute.claims [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.470273] env[68244]: DEBUG nova.compute.manager [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1300.471190] env[68244]: DEBUG nova.compute.manager [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1300.471190] env[68244]: DEBUG oslo_concurrency.lockutils [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.471190] env[68244]: DEBUG oslo_concurrency.lockutils [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.471190] env[68244]: DEBUG nova.network.neutron [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1300.489217] env[68244]: DEBUG nova.compute.utils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1300.493124] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1300.493366] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1300.546285] env[68244]: DEBUG nova.policy [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dd4fe2dbf154c1791b0bf2e9744629a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a16375181ca41fead00ee23bd2a9af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1300.987295] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Successfully created port: e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1300.994464] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Start building block device mappings for 
instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1301.155457] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d11101-4dfe-49de-8c7f-acae16e4a7c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.165632] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4a568d-3a81-43c8-b277-bd00b85e3e1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.198739] env[68244]: DEBUG nova.network.neutron [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.198994] env[68244]: DEBUG nova.network.neutron [req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.201493] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681f3154-ea82-4c5c-816c-cc7d0d677ffd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.209594] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fede919-efac-4a0e-bce3-ff466fe3c563 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.225649] env[68244]: DEBUG nova.compute.provider_tree [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.701456] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-120493b4-8d7a-4436-bdbb-5835ec92ae15 req-55281d35-9a5f-4b47-806e-fe4b183f5c90 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.728753] env[68244]: DEBUG nova.scheduler.client.report [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1302.006722] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1302.033017] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1302.033206] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.033361] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1302.033616] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.033796] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1302.033982] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1302.034252] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1302.034450] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1302.034650] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1302.034886] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1302.035072] env[68244]: DEBUG nova.virt.hardware [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1302.035986] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b862d7-35f2-4b20-b970-f05c96e8a21f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.044161] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3f7811-34d3-46ab-862e-72e27b9bb475 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.233709] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.251s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.234213] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1302.500942] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1302.501167] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing instance network info cache due to event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1302.501381] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.501529] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1302.501686] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.714302] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Successfully updated port: e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1302.739600] env[68244]: DEBUG nova.compute.utils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1302.741806] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1302.742176] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1302.778093] env[68244]: DEBUG nova.policy [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8136fdb17a934648ace23cbae52d7af7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '297edd7cb8934787b815a3230f85b139', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1303.031308] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Successfully created port: 9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.212370] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updated VIF entry in instance network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.212733] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.222019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.222019] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.222019] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.244882] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1303.715763] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1303.716121] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1303.716206] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing instance network info cache due to event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1303.716415] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.716555] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.716713] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1303.753374] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1303.874436] env[68244]: DEBUG nova.network.neutron [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.254652] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1304.282514] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1304.282778] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.282940] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1304.283137] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.283398] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1304.283481] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1304.283691] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1304.283849] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1304.284085] env[68244]: DEBUG nova.virt.hardware [None 
req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1304.284248] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1304.284394] env[68244]: DEBUG nova.virt.hardware [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1304.285252] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9906e1-a005-44a6-a161-6f764bec61e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.297104] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5278259-0e39-4433-9275-442ba10ca5d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.377079] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.377522] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Instance network_info: |[{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1304.377827] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:d5:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1662b1e-6c27-4782-bc05-758a8a5c71f2', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1304.385415] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.387735] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1304.387980] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d0b97d-de7d-4933-bb11-8700816538ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.410846] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1304.410846] env[68244]: value = "task-2781379" [ 1304.410846] env[68244]: _type = "Task" [ 1304.410846] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.418040] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781379, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.434210] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updated VIF entry in instance network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1304.434561] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.527688] env[68244]: DEBUG nova.compute.manager [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Received event network-vif-plugged-e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1304.527914] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Acquiring lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.528146] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.528319] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.528483] env[68244]: DEBUG nova.compute.manager [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] No waiting events found dispatching network-vif-plugged-e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1304.528699] env[68244]: WARNING nova.compute.manager [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Received unexpected event network-vif-plugged-e1662b1e-6c27-4782-bc05-758a8a5c71f2 for instance with vm_state building and task_state spawning. [ 1304.528804] env[68244]: DEBUG nova.compute.manager [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Received event network-changed-e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1304.528954] env[68244]: DEBUG nova.compute.manager [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Refreshing instance network info cache due to event network-changed-e1662b1e-6c27-4782-bc05-758a8a5c71f2. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1304.529157] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.529294] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.529499] env[68244]: DEBUG nova.network.neutron [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Refreshing network info cache for port e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1304.794528] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Successfully updated port: 9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.920750] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781379, 'name': CreateVM_Task, 'duration_secs': 0.304446} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.920904] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1304.921590] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.921756] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.922138] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1304.922358] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b708a43-1f08-498a-a04c-bfb9d72fb0a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.926583] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1304.926583] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283cde8-3a14-b7a4-3b32-9c2add9d7166" [ 1304.926583] env[68244]: _type = "Task" [ 1304.926583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.933717] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283cde8-3a14-b7a4-3b32-9c2add9d7166, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.937240] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.937464] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1304.937625] env[68244]: DEBUG nova.compute.manager [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1304.937819] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.937957] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.938133] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1305.203886] env[68244]: DEBUG nova.network.neutron [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updated VIF entry in instance network info cache for port e1662b1e-6c27-4782-bc05-758a8a5c71f2. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.204264] env[68244]: DEBUG nova.network.neutron [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.298788] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.299055] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.299291] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.437464] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5283cde8-3a14-b7a4-3b32-9c2add9d7166, 'name': SearchDatastore_Task, 'duration_secs': 0.01211} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.437718] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.437925] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1305.438177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.438330] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.438509] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.438766] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85154b09-a9b1-44b9-8096-a61783415473 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.449620] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.449783] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1305.450523] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb3c64c7-191e-4d64-8d82-a6e2a98aed70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.455893] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1305.455893] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52162386-7120-3f9d-19b5-30747d4df6b2" [ 1305.455893] env[68244]: _type = "Task" [ 1305.455893] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.463139] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52162386-7120-3f9d-19b5-30747d4df6b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.700320] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.700720] env[68244]: DEBUG nova.network.neutron [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.708292] env[68244]: DEBUG oslo_concurrency.lockutils [req-9ad42d1f-5306-4a3a-a225-a5fc158dd8da req-b3ee087a-41de-489e-bc11-2f509beac555 service nova] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" 
{{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.831338] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.947785] env[68244]: DEBUG nova.network.neutron [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating instance_info_cache with network_info: [{"id": "9391d531-b415-45c2-8310-0bed3f83b727", "address": "fa:16:3e:e8:38:53", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9391d531-b4", "ovs_interfaceid": "9391d531-b415-45c2-8310-0bed3f83b727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.967397] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52162386-7120-3f9d-19b5-30747d4df6b2, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.968150] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c479ac13-830a-477f-bcf7-599713ea3a02 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.973553] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1305.973553] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a50fa3-0843-2349-439f-0d5f7872b136" [ 1305.973553] env[68244]: _type = "Task" [ 1305.973553] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.981159] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a50fa3-0843-2349-439f-0d5f7872b136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.203380] env[68244]: DEBUG oslo_concurrency.lockutils [req-f967b416-aead-487d-8505-e2599b8cdd49 req-090ade7f-bb7f-49c7-bfd5-d39523f0fa62 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.450256] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.450604] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Instance network_info: |[{"id": "9391d531-b415-45c2-8310-0bed3f83b727", "address": "fa:16:3e:e8:38:53", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9391d531-b4", "ovs_interfaceid": "9391d531-b415-45c2-8310-0bed3f83b727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1306.451058] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:38:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9391d531-b415-45c2-8310-0bed3f83b727', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.458364] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8f8255b-d244-4c42-9baa-3da6257be94e 
tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1306.458566] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.458796] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-977b2d8e-e637-4637-8ece-9b4da524f50a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.478869] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.478869] env[68244]: value = "task-2781380" [ 1306.478869] env[68244]: _type = "Task" [ 1306.478869] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.485715] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a50fa3-0843-2349-439f-0d5f7872b136, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.486250] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.486500] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1306.486729] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b05239f-ba2d-4fd7-91ad-602744133430 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.490863] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781380, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.495430] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1306.495430] env[68244]: value = "task-2781381" [ 1306.495430] env[68244]: _type = "Task" [ 1306.495430] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.502274] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.558675] env[68244]: DEBUG nova.compute.manager [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Received event network-vif-plugged-9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1306.558931] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.559271] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.559459] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.559696] env[68244]: DEBUG nova.compute.manager [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] No waiting events found dispatching network-vif-plugged-9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1306.559906] env[68244]: WARNING nova.compute.manager [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Received unexpected event network-vif-plugged-9391d531-b415-45c2-8310-0bed3f83b727 for instance with vm_state building and task_state spawning. [ 1306.560125] env[68244]: DEBUG nova.compute.manager [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Received event network-changed-9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1306.560322] env[68244]: DEBUG nova.compute.manager [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Refreshing instance network info cache due to event network-changed-9391d531-b415-45c2-8310-0bed3f83b727. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1306.560530] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Acquiring lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.560691] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Acquired lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.560878] env[68244]: DEBUG nova.network.neutron [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Refreshing network info cache for port 9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.988739] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781380, 'name': CreateVM_Task, 'duration_secs': 0.408543} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.989106] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.989518] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.989680] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.990033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1306.990264] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d62a2711-b616-409d-a66e-02a7ec8c971f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.994249] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1306.994249] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f5eccb-ef08-df24-fbc7-0c35cbd129ac" [ 1306.994249] env[68244]: _type = "Task" [ 1306.994249] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.004302] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f5eccb-ef08-df24-fbc7-0c35cbd129ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.007218] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781381, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.233193] env[68244]: DEBUG nova.network.neutron [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updated VIF entry in instance network info cache for port 9391d531-b415-45c2-8310-0bed3f83b727. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.233572] env[68244]: DEBUG nova.network.neutron [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating instance_info_cache with network_info: [{"id": "9391d531-b415-45c2-8310-0bed3f83b727", "address": "fa:16:3e:e8:38:53", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9391d531-b4", "ovs_interfaceid": "9391d531-b415-45c2-8310-0bed3f83b727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.508296] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f5eccb-ef08-df24-fbc7-0c35cbd129ac, 'name': SearchDatastore_Task, 'duration_secs': 0.00942} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.511805] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.512130] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.512396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.512548] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.512729] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.513154] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781381, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.513230] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b433d5c-403f-4898-ab65-bdbd971a2b41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.520969] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.521158] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.521841] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bc157eb-2142-4e15-90da-5a1be857f07c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.527106] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1307.527106] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5244ebde-eeba-498b-ff0d-1ef01e915267" [ 1307.527106] env[68244]: _type = "Task" [ 1307.527106] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.534142] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5244ebde-eeba-498b-ff0d-1ef01e915267, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.735715] env[68244]: DEBUG oslo_concurrency.lockutils [req-beab5351-fbe6-40af-94c9-7148681bfe95 req-50dc6949-6f4a-4907-af99-25eeffad49ba service nova] Releasing lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.007971] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781381, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.445523} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.008277] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1308.008456] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1308.008696] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a63d4e40-44e9-4fc7-a909-ed0608add28e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.015354] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1308.015354] env[68244]: value = "task-2781382" [ 1308.015354] env[68244]: _type = "Task" [ 1308.015354] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.023580] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.035145] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5244ebde-eeba-498b-ff0d-1ef01e915267, 'name': SearchDatastore_Task, 'duration_secs': 0.008967} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.035855] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b60e46f-f79f-4771-a622-9ce1746dafe7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.040728] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1308.040728] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524382c0-6db6-bbd1-6c23-e670e69bccbb" [ 1308.040728] env[68244]: _type = "Task" [ 1308.040728] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.047945] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524382c0-6db6-bbd1-6c23-e670e69bccbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.527116] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071919} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.527116] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.527590] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ea438e-8302-458b-9d09-115aeb0abd0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.548835] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.549188] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-406ecacd-a085-4dbe-9e43-9486bfa7c483 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.572062] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]524382c0-6db6-bbd1-6c23-e670e69bccbb, 'name': SearchDatastore_Task, 'duration_secs': 0.00926} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.573336] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.573599] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d4adee86-43f1-4d6f-a4a5-8cce39e1f03e/d4adee86-43f1-4d6f-a4a5-8cce39e1f03e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.573913] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1308.573913] env[68244]: value = "task-2781383" [ 1308.573913] env[68244]: _type = "Task" [ 1308.573913] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.574116] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed71c4cc-537e-4b22-ac78-885d932e62c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.582414] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1308.582414] env[68244]: value = "task-2781384" [ 1308.582414] env[68244]: _type = "Task" [ 1308.582414] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.585586] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781383, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.593228] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.086154] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781383, 'name': ReconfigVM_Task, 'duration_secs': 0.267173} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.089264] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfigured VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1309.089998] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3ab5ae0-e75d-4c20-bb1c-25c66d38a531 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.096173] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.097344] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1309.097344] env[68244]: value = "task-2781385" [ 1309.097344] env[68244]: _type = "Task" [ 1309.097344] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.105060] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781385, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.595205] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.880785} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.595539] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d4adee86-43f1-4d6f-a4a5-8cce39e1f03e/d4adee86-43f1-4d6f-a4a5-8cce39e1f03e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1309.595746] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1309.595988] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a15fc283-249e-4e26-b18d-cb563571f296 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.605751] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781385, 'name': Rename_Task, 'duration_secs': 0.169992} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.606785] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1309.607081] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1309.607081] env[68244]: value = "task-2781386" [ 1309.607081] env[68244]: _type = "Task" [ 1309.607081] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.607284] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2588fc7-4b81-4ad8-aa71-c562b6f4bf0b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.615889] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.617041] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1309.617041] env[68244]: value = "task-2781387" [ 1309.617041] env[68244]: _type = "Task" [ 1309.617041] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.623954] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781387, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.118923] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066581} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.122167] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1310.122957] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddd97b8-4eda-4d33-af40-1b7e95401b0a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.131521] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781387, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.149083] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] d4adee86-43f1-4d6f-a4a5-8cce39e1f03e/d4adee86-43f1-4d6f-a4a5-8cce39e1f03e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1310.149363] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21d15ba6-03ba-4965-98e1-b5ebeb58964e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.167820] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1310.167820] env[68244]: value = "task-2781388" [ 1310.167820] env[68244]: _type = "Task" [ 1310.167820] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.175795] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781388, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.628265] env[68244]: DEBUG oslo_vmware.api [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781387, 'name': PowerOnVM_Task, 'duration_secs': 0.765482} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.628537] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1310.628734] env[68244]: INFO nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1310.628911] env[68244]: DEBUG nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1310.629685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329868c7-bcfa-459c-8a4f-39e2a4dc27fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.678117] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781388, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.920078] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.920078] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.148199] env[68244]: INFO nova.compute.manager [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Took 14.81 seconds to build instance. 
[ 1311.177863] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781388, 'name': ReconfigVM_Task, 'duration_secs': 0.836422} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.178144] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] d4adee86-43f1-4d6f-a4a5-8cce39e1f03e/d4adee86-43f1-4d6f-a4a5-8cce39e1f03e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.178755] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5c3f484-890d-4640-b610-6d2a368ad29d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.185136] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1311.185136] env[68244]: value = "task-2781389" [ 1311.185136] env[68244]: _type = "Task" [ 1311.185136] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.192285] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781389, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.422425] env[68244]: INFO nova.compute.manager [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Detaching volume d602d1e3-8fef-4f67-a2e3-751cb584e75f [ 1311.454635] env[68244]: INFO nova.virt.block_device [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Attempting to driver detach volume d602d1e3-8fef-4f67-a2e3-751cb584e75f from mountpoint /dev/sdb [ 1311.454878] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1311.455083] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1311.455983] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8ee3e4-48d0-4d94-a8d3-bc24256fb73c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.481415] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e6cee3-4c00-4756-b760-3ce78028a2fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.488506] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4b37ec-67b2-401f-8568-87f376c4483e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.510048] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65ffbe7-a4e9-40f7-b63d-1aef3d11b80f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.524399] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] The volume has not been displaced from its original location: [datastore2] volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f/volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1311.529723] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1311.529997] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d8c8df7-533d-40d4-89a7-2e0fe5e17952 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.547651] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1311.547651] env[68244]: value = "task-2781390" [ 1311.547651] env[68244]: _type = "Task" [ 1311.547651] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.555143] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.650092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8c1ec197-84b2-4171-bd46-724e827ff596 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.316s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.694507] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781389, 'name': Rename_Task, 'duration_secs': 0.266595} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.694784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.695040] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a038ade-3054-434e-8ae9-5256b1f70a79 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.700793] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1311.700793] env[68244]: value = "task-2781391" [ 1311.700793] env[68244]: _type = "Task" [ 1311.700793] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.710068] env[68244]: DEBUG nova.compute.manager [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Received event network-changed-e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1311.710286] env[68244]: DEBUG nova.compute.manager [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Refreshing instance network info cache due to event network-changed-e1662b1e-6c27-4782-bc05-758a8a5c71f2. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1311.710503] env[68244]: DEBUG oslo_concurrency.lockutils [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.710647] env[68244]: DEBUG oslo_concurrency.lockutils [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.710805] env[68244]: DEBUG nova.network.neutron [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Refreshing network info cache for port e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.712202] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.057561] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781390, 'name': ReconfigVM_Task, 'duration_secs': 0.349647} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.057818] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1312.062566] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7b558cc-03d7-4475-af2c-15b7c2d54b3f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.077136] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1312.077136] env[68244]: value = "task-2781392" [ 1312.077136] env[68244]: _type = "Task" [ 1312.077136] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.080404] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1312.085114] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.210830] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781391, 'name': PowerOnVM_Task} progress is 98%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.424461] env[68244]: DEBUG nova.network.neutron [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updated VIF entry in instance network info cache for port e1662b1e-6c27-4782-bc05-758a8a5c71f2. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1312.424461] env[68244]: DEBUG nova.network.neutron [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.586469] env[68244]: DEBUG oslo_vmware.api [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781392, 'name': ReconfigVM_Task, 'duration_secs': 0.144746} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.586807] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559176', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'name': 'volume-d602d1e3-8fef-4f67-a2e3-751cb584e75f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0597e8ed-2f24-44c7-ac92-06af34d6a4fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f', 'serial': 'd602d1e3-8fef-4f67-a2e3-751cb584e75f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1312.713378] env[68244]: DEBUG oslo_vmware.api [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781391, 'name': PowerOnVM_Task, 'duration_secs': 0.847443} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.713641] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.713837] env[68244]: INFO nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1312.714014] env[68244]: DEBUG nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.714818] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76d6f02-dca1-42cb-9926-5baa1e6c51f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.927681] env[68244]: DEBUG oslo_concurrency.lockutils [req-d844b741-cb3a-499a-80ac-d9a0b671fd94 req-fe818bee-d04c-4ced-a80b-d912f6de61af service nova] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.128714] env[68244]: DEBUG nova.objects.instance [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'flavor' on Instance uuid 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1313.232041] env[68244]: INFO nova.compute.manager [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Took 14.55 seconds to build instance. [ 1313.733924] env[68244]: DEBUG oslo_concurrency.lockutils [None req-c8f8255b-d244-4c42-9baa-3da6257be94e tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.516s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.796990] env[68244]: DEBUG nova.compute.manager [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Received event network-changed-9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1313.797231] env[68244]: DEBUG nova.compute.manager [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Refreshing instance network info cache due to event network-changed-9391d531-b415-45c2-8310-0bed3f83b727. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1313.797449] env[68244]: DEBUG oslo_concurrency.lockutils [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] Acquiring lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.797593] env[68244]: DEBUG oslo_concurrency.lockutils [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] Acquired lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.797751] env[68244]: DEBUG nova.network.neutron [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Refreshing network info cache for port 9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.937378] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.937649] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.937865] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.938071] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.938231] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.940309] env[68244]: INFO nova.compute.manager [None 
req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Terminating instance [ 1314.135803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ceab60d-140d-486f-a069-ec6ee397a963 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.216s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.444102] env[68244]: DEBUG nova.compute.manager [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1314.444525] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.445444] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22158e2f-f7cc-4c75-8460-2df65a3c4be8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.453700] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.453960] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-235e0e92-f0cc-4874-bb2d-87ef4baae1c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.464949] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1314.464949] env[68244]: value = "task-2781393" [ 1314.464949] env[68244]: _type = "Task" [ 1314.464949] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.475445] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.539070] env[68244]: DEBUG nova.network.neutron [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updated VIF entry in instance network info cache for port 9391d531-b415-45c2-8310-0bed3f83b727. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1314.539458] env[68244]: DEBUG nova.network.neutron [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating instance_info_cache with network_info: [{"id": "9391d531-b415-45c2-8310-0bed3f83b727", "address": "fa:16:3e:e8:38:53", "network": {"id": "029eee07-b6b0-4cbe-99cd-d9f46e93737f", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1133160607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "297edd7cb8934787b815a3230f85b139", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9391d531-b4", "ovs_interfaceid": "9391d531-b415-45c2-8310-0bed3f83b727", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.975948] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781393, 'name': PowerOffVM_Task, 'duration_secs': 0.212363} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.977249] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1314.977520] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.977821] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1703869a-7de1-438c-95ba-c61cb2b31202 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.040694] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1315.040959] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1315.041185] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleting the datastore file [datastore2] f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1315.041760] env[68244]: DEBUG oslo_concurrency.lockutils [req-adad230f-b21d-4cb8-8425-6e9a44c94d9b req-b6885352-3d55-4081-8fdf-1148dfeebf0b service nova] Releasing lock "refresh_cache-d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.042155] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f52f6713-8ce7-48ff-893a-8621c0b825a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.049165] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for the task: (returnval){ [ 1315.049165] env[68244]: value = "task-2781395" [ 1315.049165] env[68244]: _type = "Task" [ 1315.049165] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.057092] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.077712] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.077957] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.214838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.215143] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.215343] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.215529] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.215697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.217801] env[68244]: INFO nova.compute.manager [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 
tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Terminating instance [ 1315.559029] env[68244]: DEBUG oslo_vmware.api [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Task: {'id': task-2781395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132803} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.559308] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.559498] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1315.559676] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.559849] env[68244]: INFO nova.compute.manager [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1315.560103] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1315.560376] env[68244]: DEBUG nova.compute.manager [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1315.560473] env[68244]: DEBUG nova.network.neutron [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1315.581276] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.581496] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.581676] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.581903] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1315.582939] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20267767-d85c-4fc6-9553-c6a81d0f68a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.592681] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3936fec9-428c-4768-b615-53ddaddb5dde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.606911] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4db596-6f03-497c-96e3-f9ebd4749056 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.612994] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603ee7f5-24b9-4f05-aacf-be555de91b2f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.644130] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179889MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1315.644130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.644130] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.722285] env[68244]: DEBUG nova.compute.manager [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1315.722568] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1315.723482] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e57ba7-72a7-42e7-96a6-5e233c29c809 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.731624] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1315.731905] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14185b6-fe74-41b3-bc2b-b690a24754ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.739457] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1315.739457] env[68244]: value = "task-2781396" [ 1315.739457] env[68244]: _type = "Task" [ 1315.739457] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.747771] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781396, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.071930] env[68244]: DEBUG nova.compute.manager [req-4b1ed5f1-882d-4f5e-a218-68c386f46988 req-4e18d89d-63dc-4bea-86ab-55d2477ba7e3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Received event network-vif-deleted-099ae899-d602-45fd-bdcf-deda125a5d3e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1316.072161] env[68244]: INFO nova.compute.manager [req-4b1ed5f1-882d-4f5e-a218-68c386f46988 req-4e18d89d-63dc-4bea-86ab-55d2477ba7e3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Neutron deleted interface 099ae899-d602-45fd-bdcf-deda125a5d3e; detaching it from the instance and deleting it from the info cache [ 1316.072512] env[68244]: DEBUG nova.network.neutron [req-4b1ed5f1-882d-4f5e-a218-68c386f46988 req-4e18d89d-63dc-4bea-86ab-55d2477ba7e3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.074220] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.074432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.075124] env[68244]: DEBUG nova.objects.instance [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 64467948-35bb-4ad7-ac76-bbbd6f66e96f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.249900] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781396, 'name': PowerOffVM_Task, 'duration_secs': 0.358703} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.250317] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1316.250448] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.250676] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92f32523-8c4d-4f50-bd2a-07d1187cad38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.318127] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.318384] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.318581] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.318872] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3983d9c-68be-411a-a1e3-411e0b5a444f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.325652] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1316.325652] env[68244]: value = "task-2781398" [ 1316.325652] env[68244]: _type = "Task" [ 1316.325652] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.334091] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781398, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.554883] env[68244]: DEBUG nova.network.neutron [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.578011] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dade207a-3cc3-44f1-8e4e-7e98e88bb84e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.591106] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8056f0-7d40-4e86-b002-685ec7dcbb83 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.622374] env[68244]: DEBUG nova.compute.manager [req-4b1ed5f1-882d-4f5e-a218-68c386f46988 req-4e18d89d-63dc-4bea-86ab-55d2477ba7e3 service nova] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Detach interface failed, port_id=099ae899-d602-45fd-bdcf-deda125a5d3e, reason: Instance f1143201-5ee1-45be-b2b1-4314a26aa10a could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1316.670512] env[68244]: DEBUG nova.objects.instance [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 64467948-35bb-4ad7-ac76-bbbd6f66e96f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 0597e8ed-2f24-44c7-ac92-06af34d6a4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance f1143201-5ee1-45be-b2b1-4314a26aa10a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 64467948-35bb-4ad7-ac76-bbbd6f66e96f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 256a0329-07b6-4bc2-a574-6e5a108d301a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677195] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 0c949d23-d98f-47d2-9f3c-d520df035d55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677473] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ae4d8900-3185-4747-ba8d-fe334d9e3237 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677473] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d4adee86-43f1-4d6f-a4a5-8cce39e1f03e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.677623] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1316.677725] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1316.787282] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a205d07-d8c9-4962-afcd-b712ae05239a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.794677] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24cdcde-d788-4a83-b375-5882c778f9f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.824974] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fc797f-355a-41b3-8f45-36b64e15429e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.838017] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccb03e8-eca6-475c-89e5-f314d2069d41 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.841749] env[68244]: DEBUG oslo_vmware.api [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134814} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.841996] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1316.842194] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1316.842401] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1316.842581] env[68244]: INFO nova.compute.manager [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1316.842820] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1316.843374] env[68244]: DEBUG nova.compute.manager [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1316.843491] env[68244]: DEBUG nova.network.neutron [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1316.853054] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.057830] env[68244]: INFO nova.compute.manager [-] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Took 1.50 seconds to deallocate network for instance. 
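For context, the "Final resource view" reported above is consistent with simple accounting over the allocations the resource tracker just listed: nine instances actively managed on this host, each holding {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} in placement, with the 512 MB 'reserved' from the MEMORY_MB inventory counted into used_ram. The following is a minimal illustrative sketch under those assumptions (it is not Nova's ResourceTracker code), reproducing the logged numbers:

    # Illustrative sketch only, not Nova's ResourceTracker.
    # Reproduces the "Final resource view" figures from the per-instance
    # allocations and the reserved memory shown in the inventory data above.
    instances = 9                                   # instances listed as actively managed
    per_instance = {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}  # allocation per instance, as logged
    reserved_memory_mb = 512                        # 'reserved' in the MEMORY_MB inventory

    used_vcpus = instances * per_instance["VCPU"]                              # 9
    used_ram_mb = instances * per_instance["MEMORY_MB"] + reserved_memory_mb   # 2240
    used_disk_gb = instances * per_instance["DISK_GB"]                         # 9

    print(f"used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB used_vcpus={used_vcpus}")
    # -> used_ram=2240MB used_disk=9GB used_vcpus=9, matching the logged view;
    #    phys_ram=196590MB and total_vcpus=48 come straight from the inventory totals.

The same inventory data also explains the scheduler reports further down: with allocation_ratio 4.0 on 48 VCPUs and 1.0 on memory and disk, the host is far from its limits, which is why "Inventory has not changed" is logged on each periodic update.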
[ 1317.176478] env[68244]: DEBUG nova.objects.base [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<64467948-35bb-4ad7-ac76-bbbd6f66e96f> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1317.176716] env[68244]: DEBUG nova.network.neutron [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1317.239313] env[68244]: DEBUG nova.policy [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1317.357061] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1317.565026] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.785333] env[68244]: DEBUG nova.network.neutron [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.862639] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1317.863177] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.219s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.863177] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.298s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.863435] env[68244]: DEBUG nova.objects.instance [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lazy-loading 'resources' on Instance uuid f1143201-5ee1-45be-b2b1-4314a26aa10a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.096742] env[68244]: DEBUG nova.compute.manager [req-a190e417-e378-4fce-9c6a-781ae56beedb req-157adeca-af79-413d-bc23-ba214bcb41c9 service nova] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Received event network-vif-deleted-7f74db30-2640-4e0b-9332-eecb85a1b8bc {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1318.287910] env[68244]: INFO nova.compute.manager [-] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Took 1.44 seconds to deallocate network for instance. [ 1318.483464] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d07e072-9704-4520-807b-e3a196ce3a8f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.490600] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2a5c06-ce59-4517-9249-a0079ccdb6e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.520042] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba4c760-3c45-4024-9903-1f90d720805b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.527362] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527891ca-9a68-444f-aa53-4fdbffdf1b06 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.540871] env[68244]: DEBUG nova.compute.provider_tree [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.603325] env[68244]: DEBUG nova.compute.manager [req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1318.603531] env[68244]: DEBUG oslo_concurrency.lockutils [req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.603751] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.603912] env[68244]: DEBUG oslo_concurrency.lockutils [req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.604092] env[68244]: DEBUG nova.compute.manager [req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] No waiting events found dispatching network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1318.604261] env[68244]: WARNING nova.compute.manager [req-602c4254-4634-4b42-8ecc-cef5ac6dcd74 req-1b83148d-059b-4f1f-912e-81f81335cb85 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received unexpected event network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 for instance with vm_state active and task_state None. [ 1318.679685] env[68244]: DEBUG nova.network.neutron [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Successfully updated port: 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.794724] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.044387] env[68244]: DEBUG nova.scheduler.client.report [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.182751] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.183063] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 
tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.183360] env[68244]: DEBUG nova.network.neutron [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.549262] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.686s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.552068] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.758s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.552383] env[68244]: DEBUG nova.objects.instance [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'resources' on Instance uuid 0597e8ed-2f24-44c7-ac92-06af34d6a4fa {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1319.569496] env[68244]: INFO nova.scheduler.client.report [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Deleted allocations for instance f1143201-5ee1-45be-b2b1-4314a26aa10a [ 1319.721644] env[68244]: WARNING nova.network.neutron [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. 
ignoring it [ 1319.859341] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.859697] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.860060] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.860345] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.006232] env[68244]: DEBUG nova.network.neutron [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "address": "fa:16:3e:b7:17:b5", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5331c2be-2e", "ovs_interfaceid": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.078378] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f0b30e6b-aaa2-4a76-8d05-94681f86f401 tempest-AttachVolumeShelveTestJSON-1195170854 tempest-AttachVolumeShelveTestJSON-1195170854-project-member] Lock "f1143201-5ee1-45be-b2b1-4314a26aa10a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.140s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.079957] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.080231] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1320.168108] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72965958-96db-469f-a68f-3b5411421bc7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.175612] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05407834-c035-4459-b7dc-58329fe4f663 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.205672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949fc31f-02e0-435c-ae4d-fd97c962d4e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.213012] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ae3d55-d09c-4869-a00d-5dcb6c739199 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.227147] env[68244]: DEBUG nova.compute.provider_tree [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.509213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.509873] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 
tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.510052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.511138] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71410c06-cb5f-4464-8c5d-e169ca2d0c25 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.527977] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1320.528237] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.528395] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1320.528581] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.528727] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1320.528872] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1320.529086] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1320.529246] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1320.529414] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1320.529575] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1320.529742] env[68244]: DEBUG nova.virt.hardware [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1320.536194] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfiguring VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1320.536501] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3708fc7-68ba-4e59-9295-00022246fdf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.553782] env[68244]: DEBUG oslo_vmware.api [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1320.553782] env[68244]: value = "task-2781399" [ 1320.553782] env[68244]: _type = "Task" [ 1320.553782] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.561256] env[68244]: DEBUG oslo_vmware.api [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781399, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.648507] env[68244]: DEBUG nova.compute.manager [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1320.648844] env[68244]: DEBUG nova.compute.manager [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-5331c2be-2e5c-4d67-b2f9-1e5198f073c0. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1320.649135] env[68244]: DEBUG oslo_concurrency.lockutils [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.649330] env[68244]: DEBUG oslo_concurrency.lockutils [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.649539] env[68244]: DEBUG nova.network.neutron [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1320.730689] env[68244]: DEBUG nova.scheduler.client.report [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1321.064580] env[68244]: DEBUG oslo_vmware.api [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781399, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.235826] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.259789] env[68244]: INFO nova.scheduler.client.report [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocations for instance 0597e8ed-2f24-44c7-ac92-06af34d6a4fa [ 1321.368172] env[68244]: DEBUG nova.network.neutron [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1321.368626] env[68244]: DEBUG nova.network.neutron [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "address": "fa:16:3e:b7:17:b5", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": 
"nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5331c2be-2e", "ovs_interfaceid": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.564432] env[68244]: DEBUG oslo_vmware.api [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781399, 'name': ReconfigVM_Task, 'duration_secs': 0.618277} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.564923] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1321.565149] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfigured VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1321.769035] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b818e1bc-de96-4440-ac26-2e06bcfa1810 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "0597e8ed-2f24-44c7-ac92-06af34d6a4fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.554s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.874638] env[68244]: DEBUG oslo_concurrency.lockutils [req-6271fe0b-e31c-4e01-aaa0-0a12ec059a26 req-2e79c513-faf3-41b2-a3d7-de1c982595d6 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.069360] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1f1c3079-359c-445a-b55c-8cdcea262c76 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.995s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.263457] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.263762] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 
tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.739544] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.739766] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.766750] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.766925] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.767833] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fdbd94-c4a3-493e-82df-9e679c816482 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.786019] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c8ca36-955d-4dca-be70-564a47bac26c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.812919] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfiguring VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1323.813220] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cb68421-865b-462b-bc3d-7d0835c0cf43 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.832185] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1323.832185] env[68244]: value = "task-2781401" [ 1323.832185] env[68244]: 
_type = "Task" [ 1323.832185] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.839670] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.051800] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.052059] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.242657] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1324.343429] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.555948] env[68244]: DEBUG nova.compute.utils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1324.766176] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.766451] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.767954] env[68244]: INFO nova.compute.claims [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1324.843239] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.058698] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.344783] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.849248] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.916884] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bccd171-4c51-4ff2-a95c-b81c7c10a360 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.916884] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69c9aa5-0768-445a-9abd-abdf8f042542 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.940132] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933c1dd1-1016-4050-bb4b-4061888f4378 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.948392] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d26d14-c72a-420f-aae9-9bd8ca0d1b54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.962467] env[68244]: DEBUG nova.compute.provider_tree [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.124629] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.124629] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.124629] env[68244]: INFO nova.compute.manager [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Attaching volume 95f17070-a4e8-4189-83cc-faed53fca935 to /dev/sdb [ 1326.157935] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05dd410-a78a-4c5a-8e9e-6a1aa62a3008 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.164523] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b9cb54-0b1d-411c-a41f-5951a1859f6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.178045] env[68244]: DEBUG nova.virt.block_device [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 
256a0329-07b6-4bc2-a574-6e5a108d301a] Updating existing volume attachment record: 336e03f8-2052-4cbe-ab3f-b9ce52ea7e46 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1326.348380] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.466286] env[68244]: DEBUG nova.scheduler.client.report [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.848208] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.971339] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.971861] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1327.073900] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.348135] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.476653] env[68244]: DEBUG nova.compute.utils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1327.478221] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1327.478516] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1327.522658] env[68244]: DEBUG nova.policy [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3151a146805a456da750a47964f86f2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a151f53070d94d08bf7e85617a6f5190', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1327.776660] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Successfully created port: 53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1327.849616] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.981559] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1328.350201] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.851337] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.991640] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1329.018968] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1329.019222] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1329.019379] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1329.019561] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1329.019707] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1329.019853] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1329.020074] 
env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1329.020240] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1329.020409] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1329.020575] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1329.020758] env[68244]: DEBUG nova.virt.hardware [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1329.021613] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43383282-9f60-4fd8-abc6-87a5c1bd8a6b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.029561] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b756bf-c8b5-42dc-acfe-908f7d08bbab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.128026] env[68244]: DEBUG nova.compute.manager [req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Received event network-vif-plugged-53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1329.128269] env[68244]: DEBUG oslo_concurrency.lockutils [req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] Acquiring lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.128480] env[68244]: DEBUG oslo_concurrency.lockutils [req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.128646] env[68244]: DEBUG oslo_concurrency.lockutils 
[req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.128816] env[68244]: DEBUG nova.compute.manager [req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] No waiting events found dispatching network-vif-plugged-53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1329.128979] env[68244]: WARNING nova.compute.manager [req-b0431fe3-3a39-434a-afc5-4b6ab7b877a0 req-94557d99-e601-4be0-a5fb-66ed4e79260e service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Received unexpected event network-vif-plugged-53c89888-9d51-40ce-af06-fc64566eb47d for instance with vm_state building and task_state spawning. [ 1329.208950] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Successfully updated port: 53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1329.351987] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.711799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.711857] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.712024] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1329.853184] env[68244]: DEBUG oslo_vmware.api [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781401, 'name': ReconfigVM_Task, 'duration_secs': 5.769292} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.853562] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.853687] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Reconfigured VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1330.243059] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1330.362526] env[68244]: DEBUG nova.network.neutron [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updating instance_info_cache with network_info: [{"id": "53c89888-9d51-40ce-af06-fc64566eb47d", "address": "fa:16:3e:03:0a:29", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53c89888-9d", "ovs_interfaceid": "53c89888-9d51-40ce-af06-fc64566eb47d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.724391] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1330.724622] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559185', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'name': 'volume-95f17070-a4e8-4189-83cc-faed53fca935', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '256a0329-07b6-4bc2-a574-6e5a108d301a', 'attached_at': '', 'detached_at': '', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'serial': '95f17070-a4e8-4189-83cc-faed53fca935'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1330.725545] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e065c60-9128-40ed-ba26-8cd337c3a6a7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.741580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a52cabf-3df0-45ef-9fe4-cf15211da8e0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.765162] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-95f17070-a4e8-4189-83cc-faed53fca935/volume-95f17070-a4e8-4189-83cc-faed53fca935.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.765674] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d63718a7-e3fd-4e63-837b-1538544ee90a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.783616] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1330.783616] env[68244]: value = "task-2781404" [ 1330.783616] env[68244]: _type = "Task" [ 1330.783616] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.790640] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781404, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.865588] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.865970] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Instance network_info: |[{"id": "53c89888-9d51-40ce-af06-fc64566eb47d", "address": "fa:16:3e:03:0a:29", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53c89888-9d", "ovs_interfaceid": "53c89888-9d51-40ce-af06-fc64566eb47d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1330.866478] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:0a:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53c89888-9d51-40ce-af06-fc64566eb47d', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1330.874528] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1330.874759] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1330.875026] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb5b0035-a656-40bb-8a42-c39027cc710e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.896967] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1330.896967] env[68244]: value = "task-2781405" [ 1330.896967] env[68244]: _type = "Task" [ 1330.896967] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.904695] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781405, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.129772] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.129957] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.130176] env[68244]: DEBUG nova.network.neutron [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1331.155316] env[68244]: DEBUG nova.compute.manager [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Received event network-changed-53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1331.155470] env[68244]: DEBUG nova.compute.manager [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Refreshing instance network info cache due to event network-changed-53c89888-9d51-40ce-af06-fc64566eb47d. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1331.155683] env[68244]: DEBUG oslo_concurrency.lockutils [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] Acquiring lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.155823] env[68244]: DEBUG oslo_concurrency.lockutils [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] Acquired lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.156457] env[68244]: DEBUG nova.network.neutron [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Refreshing network info cache for port 53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1331.293712] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781404, 'name': ReconfigVM_Task, 'duration_secs': 0.323158} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.294039] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-95f17070-a4e8-4189-83cc-faed53fca935/volume-95f17070-a4e8-4189-83cc-faed53fca935.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.299275] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-464d2ba0-ef35-4213-a2be-04f9f92ff57b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.314787] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1331.314787] env[68244]: value = "task-2781406" [ 1331.314787] env[68244]: _type = "Task" [ 1331.314787] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.323773] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781406, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.407035] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781405, 'name': CreateVM_Task, 'duration_secs': 0.353038} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.411031] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1331.411031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.411031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.411031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1331.411031] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9db19ab5-fa86-4fa4-a4f7-6a7aa57168e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.414445] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1331.414445] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ca147a-1095-88c2-ab65-299487b0de89" [ 1331.414445] env[68244]: _type = "Task" [ 1331.414445] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.423062] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ca147a-1095-88c2-ab65-299487b0de89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.826591] env[68244]: DEBUG oslo_vmware.api [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781406, 'name': ReconfigVM_Task, 'duration_secs': 0.127401} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.826929] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559185', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'name': 'volume-95f17070-a4e8-4189-83cc-faed53fca935', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '256a0329-07b6-4bc2-a574-6e5a108d301a', 'attached_at': '', 'detached_at': '', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'serial': '95f17070-a4e8-4189-83cc-faed53fca935'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1331.841309] env[68244]: INFO nova.network.neutron [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1331.841616] env[68244]: DEBUG nova.network.neutron [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.925635] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ca147a-1095-88c2-ab65-299487b0de89, 'name': SearchDatastore_Task, 'duration_secs': 0.009648} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.926091] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1331.926290] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.926569] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.926757] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.926996] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.927289] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b517835d-14f4-40ac-8cfd-2dccbb9da859 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.935480] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.935661] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.936371] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1659ec16-c7d3-44e2-8283-8c72b8b44125 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.941311] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1331.941311] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526cf99d-5993-e171-dad5-d7b4b3a18fa3" [ 1331.941311] env[68244]: _type = "Task" [ 1331.941311] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.950643] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526cf99d-5993-e171-dad5-d7b4b3a18fa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.214046] env[68244]: DEBUG nova.compute.manager [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1332.214336] env[68244]: DEBUG nova.compute.manager [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing instance network info cache due to event network-changed-c38bbec1-b6bf-4b43-9914-b4dc63d8e894. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1332.214471] env[68244]: DEBUG oslo_concurrency.lockutils [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] Acquiring lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.236026] env[68244]: DEBUG nova.network.neutron [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updated VIF entry in instance network info cache for port 53c89888-9d51-40ce-af06-fc64566eb47d. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1332.236294] env[68244]: DEBUG nova.network.neutron [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updating instance_info_cache with network_info: [{"id": "53c89888-9d51-40ce-af06-fc64566eb47d", "address": "fa:16:3e:03:0a:29", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53c89888-9d", "ovs_interfaceid": "53c89888-9d51-40ce-af06-fc64566eb47d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.343787] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.346902] env[68244]: DEBUG oslo_concurrency.lockutils [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] Acquired lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1332.347172] env[68244]: DEBUG nova.network.neutron [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Refreshing network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1332.451286] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]526cf99d-5993-e171-dad5-d7b4b3a18fa3, 'name': SearchDatastore_Task, 'duration_secs': 0.008367} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.452097] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48a23fdb-9566-44db-9eb8-9478bf635aeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.456965] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1332.456965] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e830a6-3176-0f27-84b6-af2ba36fffc6" [ 1332.456965] env[68244]: _type = "Task" [ 1332.456965] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.464276] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e830a6-3176-0f27-84b6-af2ba36fffc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.738649] env[68244]: DEBUG oslo_concurrency.lockutils [req-d65a43ce-726f-4648-b631-cf795b85a2f9 req-8946523a-1ee3-4127-b5ad-e1490256a785 service nova] Releasing lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.850124] env[68244]: DEBUG oslo_concurrency.lockutils [None req-590bd6c9-750c-4029-890f-7aa68a33e9f1 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-64467948-35bb-4ad7-ac76-bbbd6f66e96f-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.586s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.864811] env[68244]: DEBUG nova.objects.instance [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid 256a0329-07b6-4bc2-a574-6e5a108d301a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.898830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.899099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.899500] env[68244]: DEBUG nova.objects.instance [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'flavor' on Instance uuid 0c949d23-d98f-47d2-9f3c-d520df035d55 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.968461] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e830a6-3176-0f27-84b6-af2ba36fffc6, 'name': SearchDatastore_Task, 'duration_secs': 0.008981} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.968687] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.968827] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715/3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1332.969117] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d34399b-aa75-496e-b70d-7357dd2a142b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.980036] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1332.980036] env[68244]: value = "task-2781407" [ 1332.980036] env[68244]: _type = "Task" [ 1332.980036] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.985641] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.050446] env[68244]: DEBUG nova.network.neutron [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updated VIF entry in instance network info cache for port c38bbec1-b6bf-4b43-9914-b4dc63d8e894. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1333.050831] env[68244]: DEBUG nova.network.neutron [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [{"id": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "address": "fa:16:3e:0d:51:d6", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38bbec1-b6", "ovs_interfaceid": "c38bbec1-b6bf-4b43-9914-b4dc63d8e894", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.370932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e78ff685-6dd0-4bff-9f4f-809eab067a11 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.486944] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482646} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.487216] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715/3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1333.487428] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1333.487668] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c23e6223-832d-4b8b-8c5f-0d51cc4c7c17 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.493324] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1333.493324] env[68244]: value = "task-2781408" [ 1333.493324] env[68244]: _type = "Task" [ 1333.493324] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.496655] env[68244]: DEBUG nova.objects.instance [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'pci_requests' on Instance uuid 0c949d23-d98f-47d2-9f3c-d520df035d55 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.502207] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781408, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.555816] env[68244]: DEBUG oslo_concurrency.lockutils [req-5f5ba492-d67c-4d43-949c-5918df528469 req-55b32f5d-1ddf-46d1-9b25-747536251fd6 service nova] Releasing lock "refresh_cache-64467948-35bb-4ad7-ac76-bbbd6f66e96f" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1333.583096] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.583365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.999198] env[68244]: DEBUG nova.objects.base [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Object Instance<0c949d23-d98f-47d2-9f3c-d520df035d55> lazy-loaded attributes: flavor,pci_requests {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1333.999657] env[68244]: DEBUG nova.network.neutron [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1334.004717] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061953} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.005237] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1334.005967] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a8bef1-2af1-475c-8a9e-5f0162897db3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.027436] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715/3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1334.027675] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b3d7a3a-ead6-4a9c-bc57-b9d652076ba7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.048026] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1334.048026] env[68244]: value = "task-2781409" [ 1334.048026] env[68244]: _type = "Task" [ 1334.048026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.055328] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781409, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.074739] env[68244]: DEBUG nova.policy [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e9255893619f46dcbedf7b2caa993e13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '210583c7f70d4a77937bb82ce46d752c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1334.086553] env[68244]: INFO nova.compute.manager [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Detaching volume 95f17070-a4e8-4189-83cc-faed53fca935 [ 1334.118377] env[68244]: INFO nova.virt.block_device [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Attempting to driver detach volume 95f17070-a4e8-4189-83cc-faed53fca935 from mountpoint /dev/sdb [ 1334.118683] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1334.118926] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559185', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'name': 'volume-95f17070-a4e8-4189-83cc-faed53fca935', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '256a0329-07b6-4bc2-a574-6e5a108d301a', 'attached_at': '', 'detached_at': '', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'serial': '95f17070-a4e8-4189-83cc-faed53fca935'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1334.119883] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b172ca1b-87d4-47d8-8521-a2cc296b78bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.144031] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac752df-5564-4e1d-a345-e852d67dcde6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.151029] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ec74ac-1bfa-4465-b237-4973c236372a {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.171393] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bb1643-7e58-45ef-9c42-ec12ddcf79d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.185880] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] The volume has not been displaced from its original location: [datastore2] volume-95f17070-a4e8-4189-83cc-faed53fca935/volume-95f17070-a4e8-4189-83cc-faed53fca935.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1334.191068] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1334.191363] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a39af568-d9cb-42b8-86ff-158543e7ab61 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.208843] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1334.208843] env[68244]: value = "task-2781410" [ 1334.208843] env[68244]: _type = "Task" [ 1334.208843] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.216447] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.369766] env[68244]: DEBUG nova.compute.manager [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1334.369972] env[68244]: DEBUG nova.compute.manager [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing instance network info cache due to event network-changed-7ddc5996-0e46-45a6-996d-9bad1b9c3955. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1334.370222] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.370367] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.370532] env[68244]: DEBUG nova.network.neutron [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.558430] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781409, 'name': ReconfigVM_Task, 'duration_secs': 0.273298} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.558713] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715/3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.559353] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11ae2371-1889-421d-945a-eaac8e383c2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.565481] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1334.565481] env[68244]: value = "task-2781411" [ 1334.565481] env[68244]: _type = "Task" [ 1334.565481] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.572644] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781411, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.718997] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781410, 'name': ReconfigVM_Task, 'duration_secs': 0.232483} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.719273] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1334.723948] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25844308-169a-40fd-983f-46e671dcd0fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.737506] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1334.737506] env[68244]: value = "task-2781412" [ 1334.737506] env[68244]: _type = "Task" [ 1334.737506] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.744900] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781412, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.069633] env[68244]: DEBUG nova.network.neutron [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updated VIF entry in instance network info cache for port 7ddc5996-0e46-45a6-996d-9bad1b9c3955. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.069989] env[68244]: DEBUG nova.network.neutron [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.076955] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781411, 'name': Rename_Task, 'duration_secs': 0.159764} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.077227] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1335.077470] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e602c46e-654b-4fb3-aa83-4765552b0d1e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.084563] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1335.084563] env[68244]: value = "task-2781413" [ 1335.084563] env[68244]: _type = "Task" [ 1335.084563] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.093568] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781413, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.247764] env[68244]: DEBUG oslo_vmware.api [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781412, 'name': ReconfigVM_Task, 'duration_secs': 0.141868} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.248095] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559185', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'name': 'volume-95f17070-a4e8-4189-83cc-faed53fca935', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '256a0329-07b6-4bc2-a574-6e5a108d301a', 'attached_at': '', 'detached_at': '', 'volume_id': '95f17070-a4e8-4189-83cc-faed53fca935', 'serial': '95f17070-a4e8-4189-83cc-faed53fca935'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1335.573334] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9820b57-71d8-45aa-ba15-f4e4e39f8438 req-7280df3c-064b-48ce-8fcd-88d99e2646c1 service nova] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.597818] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781413, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.616858] env[68244]: DEBUG nova.network.neutron [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Successfully updated port: 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1335.787556] env[68244]: DEBUG nova.objects.instance [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'flavor' on Instance uuid 256a0329-07b6-4bc2-a574-6e5a108d301a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1336.095761] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781413, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.122887] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.123018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.123211] env[68244]: DEBUG nova.network.neutron [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1336.399863] env[68244]: DEBUG nova.compute.manager [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1336.400047] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.400244] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.400418] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.400584] env[68244]: DEBUG nova.compute.manager [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] No waiting events found dispatching network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1336.400747] env[68244]: WARNING nova.compute.manager [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received unexpected event network-vif-plugged-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 for instance with vm_state active and task_state None. 
[ 1336.400904] env[68244]: DEBUG nova.compute.manager [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-changed-5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1336.401064] env[68244]: DEBUG nova.compute.manager [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing instance network info cache due to event network-changed-5331c2be-2e5c-4d67-b2f9-1e5198f073c0. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1336.401234] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.595364] env[68244]: DEBUG oslo_vmware.api [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781413, 'name': PowerOnVM_Task, 'duration_secs': 1.20472} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.595671] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1336.595849] env[68244]: INFO nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1336.596035] env[68244]: DEBUG nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1336.596795] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03661a15-0167-4acd-a486-e6ada4f794be {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.656979] env[68244]: WARNING nova.network.neutron [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] 30f3eb09-0134-4d69-88c7-1e99db7c1d78 already exists in list: networks containing: ['30f3eb09-0134-4d69-88c7-1e99db7c1d78']. 
ignoring it [ 1336.797373] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2a200406-2a4a-4642-b50c-de3510987e5d tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.898347] env[68244]: DEBUG nova.network.neutron [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "address": "fa:16:3e:b7:17:b5", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5331c2be-2e", "ovs_interfaceid": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.114332] env[68244]: INFO nova.compute.manager [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] 
Took 12.37 seconds to build instance. [ 1337.401383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.402044] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.402214] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.402531] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.402721] env[68244]: DEBUG nova.network.neutron [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Refreshing network info cache for port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1337.404416] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8746106b-3c47-49de-8e19-d63dd81b866c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.421561] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1337.421757] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1337.421913] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image limits 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1337.422106] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1337.422253] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1337.422397] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1337.422626] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1337.422789] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1337.422953] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1337.423225] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1337.423294] env[68244]: DEBUG nova.virt.hardware [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1337.429405] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfiguring VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1337.430257] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c89f00ab-f1e2-4522-b6c5-e7b139c209b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.446863] env[68244]: DEBUG oslo_vmware.api [None 
req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1337.446863] env[68244]: value = "task-2781414" [ 1337.446863] env[68244]: _type = "Task" [ 1337.446863] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.454424] env[68244]: DEBUG oslo_vmware.api [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781414, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.617101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-19b66d3a-52e1-4224-9804-ef59e175119c tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.877s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.830287] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.831052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.831052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.831257] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.831349] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.833890] env[68244]: INFO nova.compute.manager [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Terminating instance [ 1337.958986] env[68244]: DEBUG oslo_vmware.api [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781414, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.202520] env[68244]: DEBUG nova.network.neutron [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updated VIF entry in instance network info cache for port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1338.203018] env[68244]: DEBUG nova.network.neutron [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "address": "fa:16:3e:b7:17:b5", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap5331c2be-2e", "ovs_interfaceid": "5331c2be-2e5c-4d67-b2f9-1e5198f073c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.338818] env[68244]: DEBUG nova.compute.manager [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1338.339163] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1338.340463] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25971b6-edb8-4869-909b-89117ecd130e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.350721] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.351077] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7ed9ffa-d6fb-4639-be02-c1fc5a68c14e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.358086] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1338.358086] env[68244]: value = "task-2781415" [ 1338.358086] env[68244]: _type = "Task" [ 1338.358086] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.366143] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.428973] env[68244]: DEBUG nova.compute.manager [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Received event network-changed-53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1338.428973] env[68244]: DEBUG nova.compute.manager [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Refreshing instance network info cache due to event network-changed-53c89888-9d51-40ce-af06-fc64566eb47d. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1338.429291] env[68244]: DEBUG oslo_concurrency.lockutils [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] Acquiring lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.429291] env[68244]: DEBUG oslo_concurrency.lockutils [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] Acquired lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1338.429830] env[68244]: DEBUG nova.network.neutron [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Refreshing network info cache for port 53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.459483] env[68244]: DEBUG oslo_vmware.api [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781414, 'name': ReconfigVM_Task, 'duration_secs': 0.521328} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.459995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.460225] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfigured VM to attach interface {{(pid=68244) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1338.705893] env[68244]: DEBUG oslo_concurrency.lockutils [req-2cbc0688-876d-4aa9-88bb-30c1c07128dc req-dfd908ec-f767-4062-b247-fca27f9552cb service nova] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.867975] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781415, 'name': PowerOffVM_Task, 'duration_secs': 0.162072} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.869096] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.869096] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1338.869096] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5d1ab4a-6bb4-4f8d-9fcf-ef2372fc8c62 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.945727] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1338.946279] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1338.946427] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleting the datastore file [datastore2] 256a0329-07b6-4bc2-a574-6e5a108d301a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1338.946793] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6484a798-9515-4fe1-9f06-9c10ee9e1dec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.954158] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for the task: (returnval){ [ 1338.954158] env[68244]: value = "task-2781418" [ 1338.954158] env[68244]: _type = "Task" [ 1338.954158] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.964917] env[68244]: DEBUG oslo_concurrency.lockutils [None req-445a004e-a122-4824-93ea-eefdf1c4ec27 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.066s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.965852] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.155942] env[68244]: DEBUG nova.network.neutron [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updated VIF entry in instance network info cache for port 53c89888-9d51-40ce-af06-fc64566eb47d. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.156345] env[68244]: DEBUG nova.network.neutron [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updating instance_info_cache with network_info: [{"id": "53c89888-9d51-40ce-af06-fc64566eb47d", "address": "fa:16:3e:03:0a:29", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53c89888-9d", "ovs_interfaceid": "53c89888-9d51-40ce-af06-fc64566eb47d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.464239] env[68244]: DEBUG oslo_vmware.api [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Task: {'id': task-2781418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144867} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.464593] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1339.464678] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1339.465008] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1339.465249] env[68244]: INFO nova.compute.manager [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1339.465496] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1339.465691] env[68244]: DEBUG nova.compute.manager [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1339.465784] env[68244]: DEBUG nova.network.neutron [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1339.659411] env[68244]: DEBUG oslo_concurrency.lockutils [req-e534015d-c597-42e6-8bb3-a30cecd26a8b req-597945d6-ed24-4a45-afe2-c755fe86778b service nova] Releasing lock "refresh_cache-3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1340.409685] env[68244]: DEBUG nova.network.neutron [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.461832] env[68244]: DEBUG nova.compute.manager [req-25b907e8-3e8f-4a6b-9de7-d5707bf18e91 req-f7c68757-0d75-4bcd-806c-fba8bf1afd5b service nova] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Received event network-vif-deleted-67156309-cd60-4569-ab05-05548717b11b {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1340.587809] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.588204] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.913096] env[68244]: INFO nova.compute.manager [-] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Took 1.45 seconds to deallocate network for instance. 
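The entries above trace the VMware driver's teardown of instance 256a0329-07b6-4bc2-a574-6e5a108d301a: the VM is powered off, unregistered, its datastore directory is removed via FileManager.DeleteDatastoreFile_Task, and each vCenter task is polled until the "completed successfully" record appears, after which the network is deallocated. As a rough, hedged sketch of that call pattern only (the host, credentials, vm_ref, dc_ref and datastore path below are placeholders, not values from this run), the same sequence driven directly through oslo.vmware looks approximately like this:

```python
# Hedged sketch only: approximates the power-off / unregister / file-delete
# sequence visible in the log above using oslo.vmware primitives.
# Host, credentials, vm_ref, dc_ref and the datastore path are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # managed object reference of the instance's VM (looked up elsewhere)
dc_ref = ...  # datacenter reference that owns the datastore

# PowerOffVM_Task returns a task MoRef; wait_for_task polls it, which is what
# produces the "progress is N%" / "completed successfully" lines in the log.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

# UnregisterVM is synchronous and only removes the VM from the vCenter
# inventory; the files on the datastore are untouched at this point.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# Deleting the instance directory corresponds to the
# FileManager.DeleteDatastoreFile_Task invocation above.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          file_manager,
                          name='[datastore2] <instance-uuid>',
                          datacenter=dc_ref)
session.wait_for_task(task)
```

In the driver itself these calls sit behind the helpers named in the log paths (vm_util.power_off_instance, vmops._destroy_instance, ds_util.file_delete), but the task-submit-then-poll shape is the same.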
[ 1341.092049] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.092262] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.095074] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874d1502-6c2d-4b02-9dde-6f44256fa60f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.121111] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2663fe99-90cd-41ac-8f23-93d0737dd223 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.153209] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfiguring VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1341.153630] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e45a450f-42ca-486c-98f8-504833b9e2a0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.173373] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1341.173373] env[68244]: value = "task-2781419" [ 1341.173373] env[68244]: _type = "Task" [ 1341.173373] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.181361] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.419673] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.419996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.420251] env[68244]: DEBUG nova.objects.instance [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lazy-loading 'resources' on Instance uuid 256a0329-07b6-4bc2-a574-6e5a108d301a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.685669] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.028586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb4fcc5-44ac-481e-8771-d948101f4a21 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.036557] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c71074e-bef4-4c09-8523-9dfac67fe006 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.066138] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d343d5e-e08f-45b9-b7f5-e4489e173e4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.074958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb4cd15-74be-43c2-a119-c36877fd5d1a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.087694] env[68244]: DEBUG nova.compute.provider_tree [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.186104] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.592034] env[68244]: DEBUG nova.scheduler.client.report [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1342.684145] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.096155] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.117806] env[68244]: INFO nova.scheduler.client.report [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Deleted allocations for instance 256a0329-07b6-4bc2-a574-6e5a108d301a [ 1343.184771] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.626055] env[68244]: DEBUG oslo_concurrency.lockutils [None req-563e6c21-01cb-4309-a0b6-5e40fbb1de89 tempest-AttachVolumeNegativeTest-323190800 tempest-AttachVolumeNegativeTest-323190800-project-member] Lock "256a0329-07b6-4bc2-a574-6e5a108d301a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.795s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.685416] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.186703] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.366246] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.366639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.687127] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.869596] env[68244]: DEBUG nova.compute.utils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1345.187791] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.373453] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.689029] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.192184] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.433929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.434229] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.434478] env[68244]: INFO nova.compute.manager [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attaching volume 5a65761f-0510-4983-8746-1b1f992a81a9 to /dev/sdb [ 1346.465951] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc31a275-b138-429e-aa73-837e64c8c7e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.473475] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecb12ab-99bb-4677-b01f-f18f6f30c3fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.488214] env[68244]: DEBUG nova.virt.block_device [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating existing volume attachment record: edcfd7d4-cc57-4e4e-9deb-8e620b2556e9 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1346.691073] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.191792] env[68244]: DEBUG oslo_vmware.api [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781419, 'name': ReconfigVM_Task, 'duration_secs': 5.777616} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.191994] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.192054] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Reconfigured VM to detach interface {{(pid=68244) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1348.508764] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.509199] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquired lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.509199] env[68244]: DEBUG nova.network.neutron [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1348.965016] env[68244]: DEBUG nova.compute.manager [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1349.005763] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.006110] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.006371] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.006627] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.006886] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1349.009149] env[68244]: INFO nova.compute.manager [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Terminating instance [ 1349.220670] env[68244]: INFO nova.network.neutron [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Port 5331c2be-2e5c-4d67-b2f9-1e5198f073c0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1349.221068] env[68244]: DEBUG nova.network.neutron [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [{"id": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "address": "fa:16:3e:65:e7:4f", "network": {"id": "30f3eb09-0134-4d69-88c7-1e99db7c1d78", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-792631601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "210583c7f70d4a77937bb82ce46d752c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ddc5996-0e", "ovs_interfaceid": "7ddc5996-0e46-45a6-996d-9bad1b9c3955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.486107] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.486393] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.514991] env[68244]: DEBUG nova.compute.manager [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1349.515326] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1349.516292] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea762d6-83d6-48cc-ab2d-8471541a29ed {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.524504] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1349.524739] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69ea3c15-52ce-4f77-b938-ad07b9cdb023 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.531644] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1349.531644] env[68244]: value = "task-2781428" [ 1349.531644] env[68244]: _type = "Task" [ 1349.531644] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.540103] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.724135] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Releasing lock "refresh_cache-0c949d23-d98f-47d2-9f3c-d520df035d55" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.992260] env[68244]: INFO nova.compute.claims [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1350.042175] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781428, 'name': PowerOffVM_Task, 'duration_secs': 0.183721} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.042450] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1350.042658] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1350.042935] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc4fbbd6-2712-4d5e-b567-2a97c4f25766 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.227837] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b8ddd57d-b17d-4f13-b119-6ff3ed28f615 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "interface-0c949d23-d98f-47d2-9f3c-d520df035d55-5331c2be-2e5c-4d67-b2f9-1e5198f073c0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.640s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.498588] env[68244]: INFO nova.compute.resource_tracker [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating resource usage from migration d31624ff-bf87-4029-94b1-989943b9a49f [ 1350.596949] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164b85e6-ed17-47af-8af7-9fd3a3ce8e09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.605055] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00aba20-883b-4d8c-8beb-eefc1f56131a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.635075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3158337d-e4ce-4143-b969-616db8a07181 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.643186] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac712cb6-6a79-4723-98da-df4620a23440 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.658447] env[68244]: DEBUG nova.compute.provider_tree [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.032632] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 
tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1351.032905] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559188', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'name': 'volume-5a65761f-0510-4983-8746-1b1f992a81a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'serial': '5a65761f-0510-4983-8746-1b1f992a81a9'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1351.033794] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6f7ad1-3363-4e2d-a7c1-92eea73ec9c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.050931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1e461f-2b90-4644-a915-0491fd945fda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.078101] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-5a65761f-0510-4983-8746-1b1f992a81a9/volume-5a65761f-0510-4983-8746-1b1f992a81a9.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1351.078393] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-800605ec-5786-4450-a38a-ec0efd1ad69c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.097804] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1351.097804] env[68244]: value = "task-2781432" [ 1351.097804] env[68244]: _type = "Task" [ 1351.097804] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.106244] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781432, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.165788] env[68244]: DEBUG nova.scheduler.client.report [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1351.375443] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1351.375755] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1351.375873] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleting the datastore file [datastore2] 0c949d23-d98f-47d2-9f3c-d520df035d55 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1351.376076] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b423cb7-c439-4b28-bff6-5283e07b3825 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.384850] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1351.384850] env[68244]: value = "task-2781433" [ 1351.384850] env[68244]: _type = "Task" [ 1351.384850] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.392885] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.608436] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781432, 'name': ReconfigVM_Task, 'duration_secs': 0.41241} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.608805] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-5a65761f-0510-4983-8746-1b1f992a81a9/volume-5a65761f-0510-4983-8746-1b1f992a81a9.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1351.613361] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0df11f18-3001-4842-ae82-56028fe4d1b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.628026] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1351.628026] env[68244]: value = "task-2781434" [ 1351.628026] env[68244]: _type = "Task" [ 1351.628026] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.635656] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781434, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.670722] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.184s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.670943] env[68244]: INFO nova.compute.manager [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Migrating [ 1351.896853] env[68244]: DEBUG oslo_vmware.api [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175067} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.897132] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1351.897526] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1351.897526] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1351.897684] env[68244]: INFO nova.compute.manager [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Took 2.38 seconds to destroy the instance on the hypervisor. [ 1351.897905] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1351.898150] env[68244]: DEBUG nova.compute.manager [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1351.898250] env[68244]: DEBUG nova.network.neutron [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1352.139508] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781434, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.187218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.187218] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.187218] env[68244]: DEBUG nova.network.neutron [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1352.562524] env[68244]: DEBUG nova.compute.manager [req-4d1ff2fd-5f67-4ad4-b6c8-f26956fa2372 req-1c964e74-6846-4d49-b4fc-edab80efb2ac service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Received event network-vif-deleted-7ddc5996-0e46-45a6-996d-9bad1b9c3955 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1352.562603] env[68244]: INFO nova.compute.manager [req-4d1ff2fd-5f67-4ad4-b6c8-f26956fa2372 req-1c964e74-6846-4d49-b4fc-edab80efb2ac service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Neutron deleted interface 7ddc5996-0e46-45a6-996d-9bad1b9c3955; detaching it from the instance and deleting it from the info cache [ 1352.562779] env[68244]: DEBUG nova.network.neutron [req-4d1ff2fd-5f67-4ad4-b6c8-f26956fa2372 req-1c964e74-6846-4d49-b4fc-edab80efb2ac service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.639846] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781434, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.892790] env[68244]: DEBUG nova.network.neutron [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.041349] env[68244]: DEBUG nova.network.neutron [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.064787] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c8781b4-9a99-490f-9592-beb45b9359e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.075328] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350897b3-97c6-408d-805c-aac2a88dc81d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.106259] env[68244]: DEBUG nova.compute.manager [req-4d1ff2fd-5f67-4ad4-b6c8-f26956fa2372 req-1c964e74-6846-4d49-b4fc-edab80efb2ac service nova] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Detach interface failed, port_id=7ddc5996-0e46-45a6-996d-9bad1b9c3955, reason: Instance 0c949d23-d98f-47d2-9f3c-d520df035d55 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1353.139131] env[68244]: DEBUG oslo_vmware.api [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781434, 'name': ReconfigVM_Task, 'duration_secs': 1.127043} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.139441] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559188', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'name': 'volume-5a65761f-0510-4983-8746-1b1f992a81a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'serial': '5a65761f-0510-4983-8746-1b1f992a81a9'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1353.395200] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.543527] env[68244]: INFO nova.compute.manager [-] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Took 1.65 seconds to deallocate network for instance. [ 1354.049951] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1354.050288] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1354.050444] env[68244]: DEBUG nova.objects.instance [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'resources' on Instance uuid 0c949d23-d98f-47d2-9f3c-d520df035d55 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1354.177020] env[68244]: DEBUG nova.objects.instance [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1354.661826] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ad20a3-5509-4ea2-920a-ed978199be77 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.669083] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fb888f-787e-49bb-a400-876c1d5bab6f {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.701713] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b24f5e-4b24-4e66-856f-348bd7153b32 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.704638] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dfe279f5-bcd7-49d1-9424-483fe98ad321 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.270s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.711164] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c5a26f-3e7a-4407-bce6-db6713418b78 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.726499] env[68244]: DEBUG nova.compute.provider_tree [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.911454] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f6a62a-d9cc-4262-9030-9c7b031767a3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.936354] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1355.230759] env[68244]: DEBUG nova.scheduler.client.report [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1355.443745] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1355.444063] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b6c39f5-3bd5-4ccd-a01b-fe5b2eae7180 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.458218] 
env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1355.458218] env[68244]: value = "task-2781437" [ 1355.458218] env[68244]: _type = "Task" [ 1355.458218] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.467193] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.597639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.597830] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.735073] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.759709] env[68244]: INFO nova.scheduler.client.report [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted allocations for instance 0c949d23-d98f-47d2-9f3c-d520df035d55 [ 1355.969363] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781437, 'name': PowerOffVM_Task, 'duration_secs': 0.201297} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.969638] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1355.969835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1356.102088] env[68244]: DEBUG nova.compute.utils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1356.268902] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f2fb465e-e02d-41dc-b2d5-3bf2a31d0739 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "0c949d23-d98f-47d2-9f3c-d520df035d55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.262s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.475845] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1356.476100] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.476255] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1356.476463] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.476588] env[68244]: DEBUG nova.virt.hardware [None 
req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1356.476731] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1356.476926] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1356.477094] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1356.477259] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1356.477418] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1356.477589] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1356.482566] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25ae1c66-7b7e-4b75-b5c6-be448a9c979c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.504201] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1356.504201] env[68244]: value = "task-2781438" [ 1356.504201] env[68244]: _type = "Task" [ 1356.504201] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.515698] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781438, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.605414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.727144] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.727428] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.727657] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.727876] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.728064] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.730185] env[68244]: INFO nova.compute.manager [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Terminating instance [ 1357.014690] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781438, 'name': ReconfigVM_Task, 'duration_secs': 0.187814} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.014972] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1357.234835] env[68244]: DEBUG nova.compute.manager [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1357.234954] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.235797] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc589f2-a95f-4606-8efe-5e759a576624 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.244956] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.245278] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d655a6b2-018d-420f-bae8-4bc53ec466b4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.253599] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1357.253599] env[68244]: value = "task-2781439" [ 1357.253599] env[68244]: _type = "Task" [ 1357.253599] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.261937] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781439, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.521951] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1357.522288] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.522349] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1357.522526] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.522690] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1357.522871] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1357.523233] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1357.523454] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1357.523690] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies 
{{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1357.523905] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1357.524149] env[68244]: DEBUG nova.virt.hardware [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1357.529480] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1357.529825] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e54b2270-120f-46a3-a8a5-f9e4e42e8a4e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.550502] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1357.550502] env[68244]: value = "task-2781440" [ 1357.550502] env[68244]: _type = "Task" [ 1357.550502] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.559389] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781440, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.673662] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.674133] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.674567] env[68244]: INFO nova.compute.manager [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attaching volume 8fe5aeb8-4660-43fe-ae91-9e63bf1477ec to /dev/sdc [ 1357.709601] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb3b25a-0aa6-4be9-b2aa-1338d2df7625 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.718596] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b353958b-5190-469a-b325-c7fb7ea06e1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.734939] env[68244]: DEBUG nova.virt.block_device [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating existing volume attachment record: 6efe9fc4-c136-4d98-8d4a-d405c93f97de {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1357.763615] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781439, 'name': PowerOffVM_Task, 'duration_secs': 0.202606} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.763922] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1357.764131] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1357.764424] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc4b9c3d-a77b-4669-836d-8fcc1faa3eb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.834489] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1357.834759] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1357.835055] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleting the datastore file [datastore2] 64467948-35bb-4ad7-ac76-bbbd6f66e96f {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1357.835346] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dac19641-f099-4c81-989c-d235e8649cdf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.842346] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for the task: (returnval){ [ 1357.842346] env[68244]: value = "task-2781442" [ 1357.842346] env[68244]: _type = "Task" [ 1357.842346] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.850023] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781442, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.060893] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781440, 'name': ReconfigVM_Task, 'duration_secs': 0.180529} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.061143] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1358.061910] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed6f2f0-a0eb-4e72-bc89-daaeb43015fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.084889] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.085132] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69b7ed59-3a33-4852-9409-a0f7dc1789b2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.103542] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1358.103542] env[68244]: value = "task-2781444" [ 1358.103542] env[68244]: _type = "Task" [ 1358.103542] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.111102] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.283610] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.283873] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.353307] env[68244]: DEBUG oslo_vmware.api [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Task: {'id': task-2781442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152392} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.353542] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.353773] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.353947] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.354220] env[68244]: INFO nova.compute.manager [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1358.354487] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.354674] env[68244]: DEBUG nova.compute.manager [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1358.354770] env[68244]: DEBUG nova.network.neutron [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1358.606301] env[68244]: DEBUG nova.compute.manager [req-89254193-fee6-4ac2-b1f6-931c2ac8187d req-86923c33-d076-4722-b4cc-467696533fcf service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Received event network-vif-deleted-c38bbec1-b6bf-4b43-9914-b4dc63d8e894 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1358.606551] env[68244]: INFO nova.compute.manager [req-89254193-fee6-4ac2-b1f6-931c2ac8187d req-86923c33-d076-4722-b4cc-467696533fcf service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Neutron deleted interface c38bbec1-b6bf-4b43-9914-b4dc63d8e894; detaching it from the instance and deleting it from the info cache [ 1358.606793] env[68244]: DEBUG nova.network.neutron [req-89254193-fee6-4ac2-b1f6-931c2ac8187d req-86923c33-d076-4722-b4cc-467696533fcf service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.618463] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.785997] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1359.090400] env[68244]: DEBUG nova.network.neutron [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.109908] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68f433be-16c7-4fad-8abe-176309106fda {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.117954] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781444, 'name': ReconfigVM_Task, 'duration_secs': 0.829518} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.119074] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfigured VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1359.119357] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1359.125962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66472cef-5448-483e-9e2d-9ddfcbed3f1d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.158189] env[68244]: DEBUG nova.compute.manager [req-89254193-fee6-4ac2-b1f6-931c2ac8187d req-86923c33-d076-4722-b4cc-467696533fcf service nova] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Detach interface failed, port_id=c38bbec1-b6bf-4b43-9914-b4dc63d8e894, reason: Instance 64467948-35bb-4ad7-ac76-bbbd6f66e96f could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1359.316600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.316889] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.318910] env[68244]: INFO nova.compute.claims [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1359.593425] env[68244]: INFO nova.compute.manager [-] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Took 1.24 seconds to deallocate network for instance. 
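[editor's note] The entries above and below repeat one driver-side pattern: a named per-instance or per-resource lock is taken through oslo.concurrency (e.g. Lock "d4adee86-..." acquired/released by do_attach_volume, Lock "compute_resources" held by the resource tracker), a vCenter task is started through oslo.vmware (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task), and the task moref is polled until it completes, which is what the repeated "Waiting for the task ... progress is 0%/5%/..." lines record. A minimal sketch of that pattern is shown below; it uses only the public lockutils and VMwareAPISession APIs, it is not Nova's actual vmwareapi driver code, and the host, credentials and vm_ref lookup are placeholders.

```python
# Hedged sketch of the lock-then-poll pattern visible in the log above.
# Assumes oslo.concurrency and oslo.vmware are installed; connection details
# and the managed-object reference are placeholders, not values from the log.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api


def power_off_vm(session, vm_ref, instance_uuid):
    """Serialize work on one instance, then run and poll a vCenter task."""
    # Named in-process lock, same idea as the per-instance locks in the log.
    with lockutils.lock(instance_uuid):
        # invoke_api() issues the SOAP call (PowerOffVM_Task in the log) and
        # returns a task moref; wait_for_task() polls it until it reaches a
        # terminal state, logging progress along the way.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)


# Example wiring (commented out so the module imports cleanly without a
# reachable vCenter). In the log the driver connects to its own vCenter
# endpoint with service credentials and resolves vm_ref via
# PropertyCollector / SearchIndex calls such as FindAllByUuid.
#
# session = vmware_api.VMwareAPISession(
#     'vcenter.example.org', 'user', 'secret',
#     api_retry_count=10, task_poll_interval=0.5)
# power_off_vm(session, vm_ref, 'ae4d8900-3185-4747-ba8d-fe334d9e3237')
```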
[ 1359.639067] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e5f598-47be-439e-8a7f-d02a9bccf8c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.658771] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b8df29-566a-4311-a033-be083b9718f8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.676775] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1360.099677] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.229206] env[68244]: DEBUG nova.network.neutron [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Port e1662b1e-6c27-4782-bc05-758a8a5c71f2 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1360.421320] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3444932b-0add-49df-a1ae-14bb91aa5716 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.429298] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd9702a-8018-4813-b855-c270e186571e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.459956] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7f17bc-a0a2-4ed0-ac62-94617a7d7934 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.467376] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8a730b-0aa7-4a57-89ae-52efc061b437 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.480928] env[68244]: DEBUG nova.compute.provider_tree [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1361.004749] env[68244]: ERROR nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [req-4ac5c377-00e8-473f-9ad6-08172b681647] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4ac5c377-00e8-473f-9ad6-08172b681647"}]} [ 1361.022458] env[68244]: DEBUG nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Refreshing inventories for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1361.036927] env[68244]: DEBUG nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating ProviderTree inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1361.037187] env[68244]: DEBUG nova.compute.provider_tree [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1361.048985] env[68244]: DEBUG nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Refreshing aggregate associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, aggregates: None {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1361.068610] env[68244]: DEBUG nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 
tempest-ServerActionsTestOtherA-258842533-project-member] Refreshing trait associations for resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68244) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1361.159406] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ecd70b-a9d8-4b35-9f94-4448eb113c60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.167021] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dbda73-ee0b-4955-9791-3da9d25a8e8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.196129] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a72180-f0fb-4611-9fbd-3a444f87418d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.202984] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a6077b-e578-439a-b4d6-5967f71c7da8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.217018] env[68244]: DEBUG nova.compute.provider_tree [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1361.250541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.250793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.250953] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.749856] env[68244]: DEBUG nova.scheduler.client.report [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updated inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1361.750179] env[68244]: DEBUG nova.compute.provider_tree [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating resource provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 generation from 176 to 177 during operation: update_inventory {{(pid=68244) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1361.750364] env[68244]: DEBUG nova.compute.provider_tree [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Updating inventory in ProviderTree for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1362.257995] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.941s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.258446] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1362.264483] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.165s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.264698] env[68244]: DEBUG nova.objects.instance [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lazy-loading 'resources' on Instance uuid 64467948-35bb-4ad7-ac76-bbbd6f66e96f {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1362.285772] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Volume attach. Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1362.286010] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559190', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'name': 'volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'serial': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1362.286958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f21b7a9-f417-41b9-b4d4-fbef6f24abe6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.294091] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.294304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1362.294490] env[68244]: DEBUG nova.network.neutron [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.310035] env[68244]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3358de9-25e5-4b75-83d9-a637dd5d748f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.338983] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec/volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1362.341710] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97752b41-d347-4baf-99a2-d3c29f222190 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.361568] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1362.361568] env[68244]: value = "task-2781446" [ 1362.361568] env[68244]: _type = "Task" [ 1362.361568] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.371472] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.565852] env[68244]: DEBUG nova.network.neutron [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.766639] env[68244]: DEBUG 
nova.compute.utils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1362.770648] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1362.770857] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1362.809341] env[68244]: DEBUG nova.policy [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3151a146805a456da750a47964f86f2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a151f53070d94d08bf7e85617a6f5190', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1362.873684] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781446, 'name': ReconfigVM_Task, 'duration_secs': 0.422903} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.875914] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec/volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.880764] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd478f9c-109d-417c-9888-04f36b8a4f81 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.891663] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123759de-3bc8-4b1a-b2d0-f01c2a54d7e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.899779] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8b8caf-2218-4ff3-bb87-859eaed9abfb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.904103] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1362.904103] env[68244]: value = "task-2781447" [ 1362.904103] env[68244]: _type = "Task" [ 1362.904103] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.935822] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199a1ccd-b458-4f02-996b-abdbf5f148b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.938455] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781447, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.944234] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebdac40-3f43-41be-909f-48716536cd97 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.958488] env[68244]: DEBUG nova.compute.provider_tree [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.072017] env[68244]: DEBUG oslo_concurrency.lockutils [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1363.110300] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Successfully created port: cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.274534] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1363.415427] env[68244]: DEBUG oslo_vmware.api [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781447, 'name': ReconfigVM_Task, 'duration_secs': 0.163948} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.415838] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559190', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'name': 'volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'serial': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1363.461399] env[68244]: DEBUG nova.scheduler.client.report [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1363.596152] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3453c79-44b9-4b27-9d00-e09820a0e4a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.616252] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da79899-c8e5-452d-a9eb-6de16e43f870 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.623642] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1363.779996] env[68244]: INFO nova.virt.block_device [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Booting with volume e6180bf6-5d7f-41f5-a61b-f7d3249193c6 at /dev/sda [ 1363.810042] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6591aa8b-b15a-447d-a5f0-e88275a0050c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.820482] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dddcc6-262b-4284-8b13-7cb511a921c6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1363.851116] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b6441f7-5aed-40ea-9ca2-32c267f1df01 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.859861] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d06d52a-caa9-4f27-8fa1-0a4c5797cf5d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.890801] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938836f8-6bdb-40a7-b32d-c142e6e2e636 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.897243] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec94385-7f10-4c43-953c-ce88166e11d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.910933] env[68244]: DEBUG nova.virt.block_device [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating existing volume attachment record: 6a9afa25-385d-4f03-8ec9-89b9e192c462 {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1363.966224] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.984143] env[68244]: INFO nova.scheduler.client.report [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Deleted allocations for instance 64467948-35bb-4ad7-ac76-bbbd6f66e96f [ 1364.130259] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1364.130610] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46adaa19-5776-4c5c-91c8-4dd1c679fa48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.139493] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1364.139493] env[68244]: value = "task-2781448" [ 1364.139493] env[68244]: _type = "Task" [ 1364.139493] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.148413] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781448, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.452870] env[68244]: DEBUG nova.objects.instance [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.475558] env[68244]: DEBUG nova.compute.manager [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Received event network-vif-plugged-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1364.475853] env[68244]: DEBUG oslo_concurrency.lockutils [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.476065] env[68244]: DEBUG oslo_concurrency.lockutils [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.476173] env[68244]: DEBUG oslo_concurrency.lockutils [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.476341] env[68244]: DEBUG nova.compute.manager [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] No waiting events found dispatching network-vif-plugged-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1364.476506] env[68244]: WARNING nova.compute.manager [req-701d29d9-ca24-4987-bcf8-15b3e9ad2201 req-76688aa0-6c07-429e-94f9-aa31a75b6981 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Received unexpected event network-vif-plugged-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab for instance with vm_state building and task_state block_device_mapping. 
[ 1364.492114] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ca677b5e-713a-4e4c-b63c-e67f439dab90 tempest-AttachInterfacesTestJSON-1063479517 tempest-AttachInterfacesTestJSON-1063479517-project-member] Lock "64467948-35bb-4ad7-ac76-bbbd6f66e96f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.765s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.570537] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Successfully updated port: cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.650855] env[68244]: DEBUG oslo_vmware.api [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781448, 'name': PowerOnVM_Task, 'duration_secs': 0.396959} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.651150] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1364.651339] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-44a3d17f-5a8a-4f4c-9481-75a7fbd14147 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance 'ae4d8900-3185-4747-ba8d-fe334d9e3237' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1364.960754] env[68244]: DEBUG oslo_concurrency.lockutils [None req-b0fcde89-1255-4f56-ad0c-c23be6c42311 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.287s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.073866] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.074043] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.074206] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Building network info cache for instance {{(pid=68244) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1365.237448] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.237709] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.606620] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.741120] env[68244]: INFO nova.compute.manager [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Detaching volume 5a65761f-0510-4983-8746-1b1f992a81a9 [ 1365.755218] env[68244]: DEBUG nova.network.neutron [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.784271] env[68244]: INFO nova.virt.block_device [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attempting to driver detach volume 5a65761f-0510-4983-8746-1b1f992a81a9 from mountpoint /dev/sdb [ 
1365.784514] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1365.784698] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559188', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'name': 'volume-5a65761f-0510-4983-8746-1b1f992a81a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'serial': '5a65761f-0510-4983-8746-1b1f992a81a9'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1365.785839] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c1079f-52b4-49e2-9b25-af53dae1789d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.812797] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1c4ed7-9b93-46e2-b6a7-796abeffcef4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.820327] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd2fdeb-b56b-4070-adc1-fe6cd0216705 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.843420] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20433f20-3444-4eea-a689-f72984a4b5d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.861624] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] The volume has not been displaced from its original location: [datastore2] volume-5a65761f-0510-4983-8746-1b1f992a81a9/volume-5a65761f-0510-4983-8746-1b1f992a81a9.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1365.866952] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1365.867285] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39cb272e-427d-400d-8649-a722173d0544 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.887461] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1365.887461] env[68244]: value = "task-2781449" [ 1365.887461] env[68244]: _type = "Task" [ 1365.887461] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.896187] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781449, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.994537] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1365.995181] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1365.995438] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1365.995635] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1365.995881] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1365.996165] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1365.996399] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1365.996685] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1365.996915] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1365.997173] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1365.997472] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1365.997742] env[68244]: DEBUG nova.virt.hardware [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1365.998806] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6192c72-3300-4ecf-8069-f196207af939 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.009144] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3251a4e6-0867-4e0a-be1a-6ce2d4d51ba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.258473] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.258823] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Instance network_info: |[{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1366.259263] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:79:b9:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd5bf8e6-bd7d-473b-889f-4d23b1c887ab', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.266965] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1366.267335] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.267627] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5926329-03eb-4474-be34-e2eb83c5c4cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.291173] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.291173] env[68244]: value = "task-2781450" [ 1366.291173] env[68244]: _type = "Task" [ 1366.291173] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.300116] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781450, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.397906] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781449, 'name': ReconfigVM_Task, 'duration_secs': 0.248548} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.398213] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1366.402867] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff2c0593-28c4-46c7-bb1a-820e2f632e58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.419154] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1366.419154] env[68244]: value = "task-2781451" [ 1366.419154] env[68244]: _type = "Task" [ 1366.419154] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.427706] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.514676] env[68244]: DEBUG nova.compute.manager [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Received event network-changed-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1366.514889] env[68244]: DEBUG nova.compute.manager [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Refreshing instance network info cache due to event network-changed-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1366.515150] env[68244]: DEBUG oslo_concurrency.lockutils [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.515394] env[68244]: DEBUG oslo_concurrency.lockutils [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1366.515469] env[68244]: DEBUG nova.network.neutron [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Refreshing network info cache for port cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1366.803028] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781450, 'name': CreateVM_Task, 'duration_secs': 0.36941} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.803290] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.803933] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'boot_index': 0, 'guest_format': None, 'attachment_id': '6a9afa25-385d-4f03-8ec9-89b9e192c462', 'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559189', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'name': 'volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5519079a-d2a5-48c5-921c-199e0fc60aa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'serial': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6'}, 'volume_type': None}], 'swap': None} {{(pid=68244) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1366.804168] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Root volume attach. Driver type: vmdk {{(pid=68244) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1366.805010] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef34aa94-8d46-4cbc-8886-13f956f15937 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.816691] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8228920d-fdd3-4784-a9ab-2233df9aef1a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.823685] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6236ade7-c30d-4e20-b478-9d148da9c397 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.833255] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c17e86b6-7c04-4bf5-a9b6-6183a898f81e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.843386] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1366.843386] env[68244]: value = "task-2781452" [ 1366.843386] env[68244]: _type = "Task" [ 1366.843386] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.864893] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781452, 'name': RelocateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.933996] env[68244]: DEBUG oslo_vmware.api [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781451, 'name': ReconfigVM_Task, 'duration_secs': 0.167257} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.934151] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559188', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'name': 'volume-5a65761f-0510-4983-8746-1b1f992a81a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '5a65761f-0510-4983-8746-1b1f992a81a9', 'serial': '5a65761f-0510-4983-8746-1b1f992a81a9'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1367.264178] env[68244]: DEBUG nova.network.neutron [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updated VIF entry in instance network info cache for port cd5bf8e6-bd7d-473b-889f-4d23b1c887ab. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1367.264561] env[68244]: DEBUG nova.network.neutron [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.353793] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781452, 'name': RelocateVM_Task, 'duration_secs': 0.383037} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.354650] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1367.354650] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559189', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'name': 'volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5519079a-d2a5-48c5-921c-199e0fc60aa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'serial': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1367.355095] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b9a301-b389-485d-afe7-7804341ab197 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.371475] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cc8c07-5c4e-4e55-a88c-ca681d711fb8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.396869] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6/volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1367.397285] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b14c0fe0-90fd-48d1-9395-8e2d106c69c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.418820] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1367.418820] env[68244]: value = "task-2781453" [ 1367.418820] env[68244]: _type = "Task" [ 1367.418820] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.427759] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781453, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.484267] env[68244]: DEBUG nova.objects.instance [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.655759] env[68244]: DEBUG nova.network.neutron [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Port e1662b1e-6c27-4782-bc05-758a8a5c71f2 binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1367.655947] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.656121] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.656289] env[68244]: DEBUG nova.network.neutron [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.767542] env[68244]: DEBUG oslo_concurrency.lockutils [req-c23e2ea8-f03d-49c2-bb25-cae46dacc123 req-108513ac-dd82-4151-9053-bc8141ba54d3 service nova] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1367.929992] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781453, 'name': ReconfigVM_Task, 'duration_secs': 0.273083} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.930239] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6/volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1367.934991] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a515cd43-cd0b-4dc5-9151-c644343c6fad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.952872] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1367.952872] env[68244]: value = "task-2781454" [ 1367.952872] env[68244]: _type = "Task" [ 1367.952872] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.963784] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781454, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.428774] env[68244]: DEBUG nova.network.neutron [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.463924] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 
tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781454, 'name': ReconfigVM_Task, 'duration_secs': 0.13879} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.464312] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559189', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'name': 'volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5519079a-d2a5-48c5-921c-199e0fc60aa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'serial': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1368.464855] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf7a15ab-075b-4d45-9261-2ddb6e389bf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.472576] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1368.472576] env[68244]: value = "task-2781455" [ 1368.472576] env[68244]: _type = "Task" [ 1368.472576] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.482895] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781455, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.496544] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7381e034-d84b-406e-b5f1-14909b448a30 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.259s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.515694] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1368.515694] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.002s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.933730] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.990510] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781455, 'name': Rename_Task, 'duration_secs': 0.14975} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.990816] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1368.991151] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82fafaf3-0192-4546-9743-dbcb5803b92b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.999414] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1368.999414] env[68244]: value = "task-2781456" [ 1368.999414] env[68244]: _type = "Task" [ 1368.999414] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.009139] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.020069] env[68244]: INFO nova.compute.manager [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Detaching volume 8fe5aeb8-4660-43fe-ae91-9e63bf1477ec [ 1369.076114] env[68244]: INFO nova.virt.block_device [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Attempting to driver detach volume 8fe5aeb8-4660-43fe-ae91-9e63bf1477ec from mountpoint /dev/sdc [ 1369.076905] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1369.076905] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559190', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'name': 'volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'serial': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1369.077473] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f42a4e1-1cba-4532-ab15-053129db1469 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.103928] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43f3aef-00c6-4dd1-9f92-3c4cb037c532 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.112182] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14773929-094f-4a7a-9f0e-6ed59f016077 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.136355] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d9ff26-23da-42bb-9eb4-13106203093d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.153953] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 
tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] The volume has not been displaced from its original location: [datastore2] volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec/volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1369.159311] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfiguring VM instance instance-00000071 to detach disk 2002 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1369.159650] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a5b4f3c-f297-4f50-a72f-5ed178b5b774 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.180441] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1369.180441] env[68244]: value = "task-2781457" [ 1369.180441] env[68244]: _type = "Task" [ 1369.180441] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.189862] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781457, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.439147] env[68244]: DEBUG nova.compute.manager [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68244) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1369.440197] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1369.440598] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.511083] env[68244]: DEBUG oslo_vmware.api [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781456, 'name': PowerOnVM_Task, 'duration_secs': 0.478491} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.511250] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1369.511797] env[68244]: INFO nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Took 3.52 seconds to spawn the instance on the hypervisor. [ 1369.511797] env[68244]: DEBUG nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1369.512338] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e342202-40c5-4ed0-a288-da275555e5e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.692747] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781457, 'name': ReconfigVM_Task, 'duration_secs': 0.325519} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.694049] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Reconfigured VM instance instance-00000071 to detach disk 2002 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1369.697981] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec3f4f22-8e48-467d-9ec0-2001182854c1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.715539] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1369.715539] env[68244]: value = "task-2781458" [ 1369.715539] env[68244]: _type = "Task" [ 1369.715539] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.724407] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781458, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.948962] env[68244]: DEBUG nova.objects.instance [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'migration_context' on Instance uuid ae4d8900-3185-4747-ba8d-fe334d9e3237 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1370.029081] env[68244]: INFO nova.compute.manager [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Took 10.74 seconds to build instance. [ 1370.224479] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.224697] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.234943] env[68244]: DEBUG oslo_vmware.api [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781458, 'name': ReconfigVM_Task, 'duration_secs': 0.255702} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.234943] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559190', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'name': 'volume-8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd4adee86-43f1-4d6f-a4a5-8cce39e1f03e', 'attached_at': '', 'detached_at': '', 'volume_id': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec', 'serial': '8fe5aeb8-4660-43fe-ae91-9e63bf1477ec'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1370.530723] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9022e546-ee55-4180-b7d5-e1d45e5d30b8 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.247s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.580233] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e142e7-7f08-489e-9c18-15235266dfb6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.588515] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddaf3be-7492-4e91-9074-c2058da8176f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.634324] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc6b65d-bad3-454c-829c-42bcd752ef49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.638865] env[68244]: DEBUG nova.compute.manager [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1370.639258] env[68244]: DEBUG nova.compute.manager [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing instance network info cache due to event network-changed-3cbfb410-db85-46ec-ad9d-96a42b67105e. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1370.639605] env[68244]: DEBUG oslo_concurrency.lockutils [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] Acquiring lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.639874] env[68244]: DEBUG oslo_concurrency.lockutils [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] Acquired lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1370.641103] env[68244]: DEBUG nova.network.neutron [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Refreshing network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.649070] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76c7437-6513-471c-bab2-1a981a1a3ea4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.665362] env[68244]: DEBUG nova.compute.provider_tree [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.732117] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1370.782553] env[68244]: DEBUG nova.objects.instance [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'flavor' on Instance uuid d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.168735] env[68244]: DEBUG nova.scheduler.client.report [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1371.253857] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.358682] env[68244]: DEBUG nova.network.neutron [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updated VIF entry in instance network info cache for port 3cbfb410-db85-46ec-ad9d-96a42b67105e. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.359114] env[68244]: DEBUG nova.network.neutron [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [{"id": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "address": "fa:16:3e:fe:07:19", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cbfb410-db", "ovs_interfaceid": "3cbfb410-db85-46ec-ad9d-96a42b67105e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.790746] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cf2e063c-9c44-41f6-947d-a80d484bcce3 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.276s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.862172] env[68244]: DEBUG oslo_concurrency.lockutils [req-e6f2def9-aafb-4425-9f15-af20115b40b2 req-75912103-4b7f-4c9a-8b68-23da865fdbb2 service nova] Releasing lock "refresh_cache-75bec02f-82f7-4e8d-81da-3c511588be29" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1372.045422] env[68244]: DEBUG nova.compute.manager [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1372.180070] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.739s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.186519] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.934s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.188119] env[68244]: INFO nova.compute.claims [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1372.567777] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.644545] env[68244]: DEBUG nova.compute.manager [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Received event network-changed-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1372.644722] env[68244]: DEBUG nova.compute.manager [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Refreshing instance network info cache due to event network-changed-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1372.644788] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.644978] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.645157] env[68244]: DEBUG nova.network.neutron [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Refreshing network info cache for port cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.966077] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.966288] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.966338] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.966514] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.966688] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.969063] env[68244]: INFO nova.compute.manager [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Terminating instance [ 1373.078196] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.339445] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830396bb-7635-4fc8-97ef-600c47fe9f6c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.347795] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a00ffe-3e36-4656-b1fe-62189e757fa1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.377977] env[68244]: DEBUG nova.network.neutron [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updated VIF entry in instance network info cache for port cd5bf8e6-bd7d-473b-889f-4d23b1c887ab. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.378327] env[68244]: DEBUG nova.network.neutron [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.379935] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261570e5-6536-428d-97f3-07260ae15d52 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.387812] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9422b4eb-818e-4972-b6c4-1b73569fbcfa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.401674] env[68244]: DEBUG nova.compute.provider_tree [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.473031] env[68244]: DEBUG nova.compute.manager [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1373.473246] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1373.474123] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2764e713-6ec4-45a9-bd93-2e8732a13a59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.482545] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1373.482764] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f09d51e-d99e-4752-b116-ebd4ea2d0c47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.489273] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1373.489273] env[68244]: value = "task-2781459" [ 1373.489273] env[68244]: _type = "Task" [ 1373.489273] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.496740] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781459, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.728057] env[68244]: INFO nova.compute.manager [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Swapping old allocation on dict_keys(['b885cb16-3bd4-46d8-abd9-28a1bf1058e3']) held by migration d31624ff-bf87-4029-94b1-989943b9a49f for instance [ 1373.756231] env[68244]: DEBUG nova.scheduler.client.report [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Overwriting current allocation {'allocations': {'b885cb16-3bd4-46d8-abd9-28a1bf1058e3': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 180}}, 'project_id': '9a16375181ca41fead00ee23bd2a9af0', 'user_id': '0dd4fe2dbf154c1791b0bf2e9744629a', 'consumer_generation': 1} on consumer ae4d8900-3185-4747-ba8d-fe334d9e3237 {{(pid=68244) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1373.832561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.832766] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1373.833015] env[68244]: DEBUG nova.network.neutron [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1373.884035] env[68244]: DEBUG oslo_concurrency.lockutils [req-cdf1174d-cced-4f1c-80ac-c4e25f781ea5 req-2ab387cf-e154-4ec9-95af-6569492f4a7a service nova] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.904806] env[68244]: DEBUG nova.scheduler.client.report [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1374.001326] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781459, 'name': 
PowerOffVM_Task, 'duration_secs': 0.210792} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.001585] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1374.001750] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1374.001996] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dfc6057-db88-482c-a044-9b54ebe07f19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.073171] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1374.073391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1374.073543] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleting the datastore file [datastore2] d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1374.073796] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6c83756-6809-40d5-98e4-df1f5250e256 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.080997] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for the task: (returnval){ [ 1374.080997] env[68244]: value = "task-2781461" [ 1374.080997] env[68244]: _type = "Task" [ 1374.080997] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.088928] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.409481] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.411055] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1374.412707] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.845s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.533111] env[68244]: DEBUG nova.network.neutron [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [{"id": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "address": "fa:16:3e:d7:d5:f7", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1662b1e-6c", "ovs_interfaceid": "e1662b1e-6c27-4782-bc05-758a8a5c71f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.592626] env[68244]: DEBUG oslo_vmware.api [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Task: {'id': task-2781461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169975} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.592982] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1374.593260] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1374.593449] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1374.593633] env[68244]: INFO nova.compute.manager [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1374.593875] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1374.594114] env[68244]: DEBUG nova.compute.manager [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1374.594214] env[68244]: DEBUG nova.network.neutron [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1374.916750] env[68244]: DEBUG nova.compute.utils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1374.920145] env[68244]: INFO nova.compute.claims [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.923894] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1374.924080] env[68244]: DEBUG nova.network.neutron [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.964765] env[68244]: DEBUG nova.policy [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1375.039165] env[68244]: DEBUG oslo_concurrency.lockutils [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-ae4d8900-3185-4747-ba8d-fe334d9e3237" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.039165] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1375.039165] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68845bec-2634-4061-8ff1-b739a69a22d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.047041] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1375.047041] env[68244]: value = "task-2781462" [ 1375.047041] env[68244]: _type = "Task" [ 1375.047041] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.056483] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781462, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.093703] env[68244]: DEBUG nova.compute.manager [req-e9c3d912-1599-4206-ae1c-4ee96f1d1d84 req-1f4e54ff-3e0b-480d-9cfe-157b6bfb395c service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Received event network-vif-deleted-9391d531-b415-45c2-8310-0bed3f83b727 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1375.093986] env[68244]: INFO nova.compute.manager [req-e9c3d912-1599-4206-ae1c-4ee96f1d1d84 req-1f4e54ff-3e0b-480d-9cfe-157b6bfb395c service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Neutron deleted interface 9391d531-b415-45c2-8310-0bed3f83b727; detaching it from the instance and deleting it from the info cache [ 1375.094474] env[68244]: DEBUG nova.network.neutron [req-e9c3d912-1599-4206-ae1c-4ee96f1d1d84 req-1f4e54ff-3e0b-480d-9cfe-157b6bfb395c service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.221798] env[68244]: DEBUG nova.network.neutron [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Successfully created port: 1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.424913] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1375.429232] env[68244]: INFO nova.compute.resource_tracker [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating resource usage from migration c063d393-46a5-45f5-ab79-238d0c7f4655 [ 1375.539979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f3d7a5-af8b-4dda-bc41-170de4a8d1fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.550769] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1a9090-99a4-40d9-afb6-60a8ed96be94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.558229] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781462, 'name': PowerOffVM_Task, 'duration_secs': 0.216014} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.582813] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.583516] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1375.584053] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.584053] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1375.584199] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.584329] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1375.584479] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1375.584691] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1375.584853] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1375.585031] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1375.585209] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1375.585377] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1375.590462] env[68244]: DEBUG nova.network.neutron [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.591715] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5235f417-1bc6-4228-a581-2529c6fb0c2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.602291] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552f9e3f-1d57-4a16-9b89-698ec8599f19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.605485] env[68244]: INFO nova.compute.manager [-] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Took 1.01 seconds to deallocate network for instance. [ 1375.607172] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7e44d16-d90e-41c9-8cc1-d53079e0e4cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.616608] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9738183-7ff6-4437-a3b7-b05f6136dacf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.620463] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1375.620463] env[68244]: value = "task-2781463" [ 1375.620463] env[68244]: _type = "Task" [ 1375.620463] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.626920] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2794465a-f975-4159-aa4a-4d562bdf5d33 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.645366] env[68244]: DEBUG nova.compute.provider_tree [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.652106] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781463, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.665949] env[68244]: DEBUG nova.compute.manager [req-e9c3d912-1599-4206-ae1c-4ee96f1d1d84 req-1f4e54ff-3e0b-480d-9cfe-157b6bfb395c service nova] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Detach interface failed, port_id=9391d531-b415-45c2-8310-0bed3f83b727, reason: Instance d4adee86-43f1-4d6f-a4a5-8cce39e1f03e could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1376.077515] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.115095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.132720] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781463, 'name': ReconfigVM_Task, 'duration_secs': 0.162438} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.133701] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f7922f-a292-4616-8d0e-693814934060 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.151910] env[68244]: DEBUG nova.scheduler.client.report [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1376.156025] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1376.156257] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.156419] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1376.156602] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.156752] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1376.156900] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1376.157121] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1376.157286] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1376.157451] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1376.157609] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1376.157780] env[68244]: DEBUG nova.virt.hardware [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1376.158809] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a817008-896b-4a84-a9e2-23deebd693c9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.164314] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1376.164314] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae3208-8f1b-79f2-621c-ae63bd4b41a9" [ 1376.164314] env[68244]: _type = "Task" [ 1376.164314] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.172543] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae3208-8f1b-79f2-621c-ae63bd4b41a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.438576] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1376.466313] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1376.466454] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.466654] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1376.466787] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.466883] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1376.467062] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1376.467465] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1376.467465] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1376.467577] env[68244]: DEBUG nova.virt.hardware [None 
req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1376.467733] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1376.467901] env[68244]: DEBUG nova.virt.hardware [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1376.468768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8282684-9121-470c-a26b-9c93f36305df {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.476638] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c2f755-baf8-46eb-b266-f3e971dffcfc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.580695] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.598782] env[68244]: DEBUG nova.compute.manager [req-fdc99c18-2428-4027-8141-936dc8156ce6 req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Received event network-vif-plugged-1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1376.599026] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdc99c18-2428-4027-8141-936dc8156ce6 req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] Acquiring lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.599253] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdc99c18-2428-4027-8141-936dc8156ce6 req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.599391] env[68244]: DEBUG oslo_concurrency.lockutils [req-fdc99c18-2428-4027-8141-936dc8156ce6 req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.599557] env[68244]: DEBUG nova.compute.manager [req-fdc99c18-2428-4027-8141-936dc8156ce6 
req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] No waiting events found dispatching network-vif-plugged-1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1376.599722] env[68244]: WARNING nova.compute.manager [req-fdc99c18-2428-4027-8141-936dc8156ce6 req-2c30f1c4-ca2b-46bd-a89a-43781451edb7 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Received unexpected event network-vif-plugged-1992261e-0c11-4394-a63d-fbb8739d97f8 for instance with vm_state building and task_state spawning. [ 1376.656324] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.244s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.656535] env[68244]: INFO nova.compute.manager [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Migrating [ 1376.662812] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.548s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.663285] env[68244]: DEBUG nova.objects.instance [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lazy-loading 'resources' on Instance uuid d4adee86-43f1-4d6f-a4a5-8cce39e1f03e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1376.684721] env[68244]: DEBUG nova.network.neutron [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Successfully updated port: 1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1376.688634] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ae3208-8f1b-79f2-621c-ae63bd4b41a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.694542] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1376.695864] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a125354-59bb-4dab-b126-b2b54fa7d6d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.714798] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1376.714798] env[68244]: value = "task-2781464" [ 1376.714798] env[68244]: _type = "Task" [ 1376.714798] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.722523] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781464, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.180408] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.180615] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1377.180828] env[68244]: DEBUG nova.network.neutron [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.189931] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.189931] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1377.190087] env[68244]: DEBUG nova.network.neutron [None 
req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1377.226144] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781464, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.276208] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e8c806-bc13-49aa-a2b8-266a7490502c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.283979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb30be5-e4f5-40d4-bb0e-efae83c4305b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.315459] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120505e-9858-42d5-be18-e70577310533 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.323493] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0420b35b-a517-46bb-9857-42ebc00338c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.337620] env[68244]: DEBUG nova.compute.provider_tree [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1377.721557] env[68244]: DEBUG nova.network.neutron [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.727904] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781464, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.841064] env[68244]: DEBUG nova.scheduler.client.report [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1377.847267] env[68244]: DEBUG nova.network.neutron [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Updating instance_info_cache with network_info: [{"id": "1992261e-0c11-4394-a63d-fbb8739d97f8", "address": "fa:16:3e:5d:28:ba", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992261e-0c", "ovs_interfaceid": "1992261e-0c11-4394-a63d-fbb8739d97f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.889946] env[68244]: DEBUG nova.network.neutron [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.229228] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781464, 'name': ReconfigVM_Task, 'duration_secs': 1.198648} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.229555] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1378.230396] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fbbb0d-9914-430b-83e2-666e087c7cea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.252039] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.252558] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f62207a-87a5-427b-85e0-e50f3d3101d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.270621] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1378.270621] env[68244]: value = "task-2781465" [ 1378.270621] env[68244]: _type = "Task" [ 1378.270621] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.281226] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781465, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.345549] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.347977] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.767s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.348176] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.348331] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1378.348785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1378.349073] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Instance network_info: |[{"id": "1992261e-0c11-4394-a63d-fbb8739d97f8", "address": "fa:16:3e:5d:28:ba", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992261e-0c", "ovs_interfaceid": "1992261e-0c11-4394-a63d-fbb8739d97f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1378.350477] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feee82e8-9fb5-4e7a-a8cd-dd7c4da16f56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.352706] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:28:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1992261e-0c11-4394-a63d-fbb8739d97f8', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1378.360220] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating folder: Project (1e3ca107ec07495cb1876bd472e0cd8a). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.360929] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9071753-5af8-4b18-a8b9-9797b4c30b1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.367790] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c93d088-45f8-44c9-aeb5-c8f964a27d07 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.373239] env[68244]: INFO nova.scheduler.client.report [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Deleted allocations for instance d4adee86-43f1-4d6f-a4a5-8cce39e1f03e [ 1378.374283] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created folder: Project (1e3ca107ec07495cb1876bd472e0cd8a) in parent group-v558876. [ 1378.374465] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating folder: Instances. Parent ref: group-v559192. 
{{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.377070] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-871299ff-dc0c-4d87-a619-ecd4a273fadc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.387284] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74b1f9e-5a50-4118-a095-58dffa106db4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.393418] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1378.396473] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a43df3-2fc4-440f-a5ef-9e8a57408cf8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.399500] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created folder: Instances in parent group-v559192. [ 1378.400375] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1378.400375] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1378.400756] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d230be9-4a58-418e-ad1b-d240dbdeb218 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.441594] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179897MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1378.441942] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.442036] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.447733] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1378.447733] env[68244]: value = "task-2781468" [ 1378.447733] env[68244]: _type = "Task" [ 1378.447733] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.455443] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781468, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.626210] env[68244]: DEBUG nova.compute.manager [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Received event network-changed-1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1378.626210] env[68244]: DEBUG nova.compute.manager [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Refreshing instance network info cache due to event network-changed-1992261e-0c11-4394-a63d-fbb8739d97f8. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1378.626362] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] Acquiring lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.627058] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] Acquired lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1378.627058] env[68244]: DEBUG nova.network.neutron [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Refreshing network info cache for port 1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1378.780297] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781465, 'name': ReconfigVM_Task, 'duration_secs': 0.317989} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.780719] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Reconfigured VM instance instance-00000070 to attach disk [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237/ae4d8900-3185-4747-ba8d-fe334d9e3237.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.781394] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3a2af7-01c2-4bb6-8d7f-86b759dc2b58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.799731] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d939adcc-9b8e-4553-b8c5-63d2d580be3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.817106] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7c12d2-f08c-40f0-9446-f8b0275a307a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.834921] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128176a9-6e49-41d2-8535-d4f033dbe1d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.842201] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.842435] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7fa5183-cbdf-4c06-a117-10fa3e9af80e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.847891] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1378.847891] env[68244]: value = "task-2781469" [ 1378.847891] env[68244]: _type = "Task" [ 1378.847891] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.854556] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781469, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.882663] env[68244]: DEBUG oslo_concurrency.lockutils [None req-231ec109-44d0-4570-99e7-237c837e9f70 tempest-AttachVolumeTestJSON-452589214 tempest-AttachVolumeTestJSON-452589214-project-member] Lock "d4adee86-43f1-4d6f-a4a5-8cce39e1f03e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.916s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1378.958956] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781468, 'name': CreateVM_Task, 'duration_secs': 0.336555} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.958956] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1378.959283] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.959516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1378.959838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1378.960707] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f1e191-0041-4c7f-ae03-61b7bde8895e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.965544] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 
tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1378.965544] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b1bbed-e392-5cb5-613e-4dcefa78ac07" [ 1378.965544] env[68244]: _type = "Task" [ 1378.965544] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.973423] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b1bbed-e392-5cb5-613e-4dcefa78ac07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.337691] env[68244]: DEBUG nova.network.neutron [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Updated VIF entry in instance network info cache for port 1992261e-0c11-4394-a63d-fbb8739d97f8. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1379.338178] env[68244]: DEBUG nova.network.neutron [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Updating instance_info_cache with network_info: [{"id": "1992261e-0c11-4394-a63d-fbb8739d97f8", "address": "fa:16:3e:5d:28:ba", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992261e-0c", "ovs_interfaceid": "1992261e-0c11-4394-a63d-fbb8739d97f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.360782] env[68244]: DEBUG oslo_vmware.api [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781469, 'name': PowerOnVM_Task, 'duration_secs': 0.372297} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.360782] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.451177] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Applying migration context for instance 5519079a-d2a5-48c5-921c-199e0fc60aa3 as it has an incoming, in-progress migration c063d393-46a5-45f5-ab79-238d0c7f4655. Migration status is migrating {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1379.452231] env[68244]: INFO nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating resource usage from migration c063d393-46a5-45f5-ab79-238d0c7f4655 [ 1379.476332] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b1bbed-e392-5cb5-613e-4dcefa78ac07, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.477356] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.477533] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.477800] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.477800] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ae4d8900-3185-4747-ba8d-fe334d9e3237 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.477914] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ee819188-5e97-4a5f-80a1-3901dfe65f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.477997] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Migration c063d393-46a5-45f5-ab79-238d0c7f4655 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1379.478128] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 5519079a-d2a5-48c5-921c-199e0fc60aa3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1379.478313] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1379.478444] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1379.480733] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1379.480986] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.481420] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.481420] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1379.481624] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.482389] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1031e054-de5f-4a1c-996f-544910d6c8de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.491213] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.491213] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.491503] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0db8fa18-1722-41ff-a542-ce0d92008270 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.501086] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1379.501086] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e3f825-95a6-fe36-91e4-7ff84c61e3a9" [ 1379.501086] env[68244]: _type = "Task" [ 1379.501086] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.509581] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e3f825-95a6-fe36-91e4-7ff84c61e3a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009366} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.512938] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38816339-1c89-4f6c-a098-1186ddce7575 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.518191] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1379.518191] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c5493-8325-a9ee-aca2-762ba7c9ebe0" [ 1379.518191] env[68244]: _type = "Task" [ 1379.518191] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.527098] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c5493-8325-a9ee-aca2-762ba7c9ebe0, 'name': SearchDatastore_Task} progress is 0%. 
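Aside: the lockutils entries around "[datastore2] devstack-image-cache_base/9aa0b4d1-..." reflect a lock-guarded check-then-populate cache: take the per-image lock, search the datastore for the cached VMDK, create the cache directory if it is missing, and only then fall through to the copy. A minimal sketch of that shape follows, with `threading.Lock` and the local filesystem standing in for lockutils and the datastore; `fetch_image` is a hypothetical helper.

```python
# Sketch of the lock-guarded image-cache check seen above; threading.Lock and
# pathlib stand in for oslo_concurrency.lockutils and the vCenter datastore.
import threading
from collections import defaultdict
from pathlib import Path

_image_locks = defaultdict(threading.Lock)   # one lock per cached image id


def ensure_cached_image(cache_root: Path, image_id: str, fetch_image) -> Path:
    """Return the cached VMDK path, populating the cache if needed.

    fetch_image(dest_path) is a hypothetical callable that downloads the
    image; it is only invoked when the cache entry does not exist yet.
    """
    cached = cache_root / image_id / f"{image_id}.vmdk"
    with _image_locks[image_id]:             # "Acquiring/Acquired lock ..."
        if not cached.exists():              # SearchDatastore_Task analogue
            cached.parent.mkdir(parents=True, exist_ok=True)  # MakeDirectory
            fetch_image(cached)
        return cached                        # "Releasing lock ..." on exit
```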
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.586161] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7115ed2a-b176-4bf1-82fb-a8bed20d69bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.593745] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47a8f6a-9d05-4059-bbbf-0213f12715f6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.624157] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ecfe59-fa1e-4565-a373-03c4d2d8913a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.630856] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e3acf8-a081-4650-b59d-2dbaad66baab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.643414] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.841835] env[68244]: DEBUG oslo_concurrency.lockutils [req-8f2975c1-fc5d-42a4-9b57-10072387823b req-558c0151-5924-401b-9e60-39f3b90eab1f service nova] Releasing lock "refresh_cache-ee819188-5e97-4a5f-80a1-3901dfe65f6e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1379.913060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2866315-2370-480d-b5ea-b86a470cb277 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.932371] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1380.027834] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525c5493-8325-a9ee-aca2-762ba7c9ebe0, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.028084] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1380.028315] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ee819188-5e97-4a5f-80a1-3901dfe65f6e/ee819188-5e97-4a5f-80a1-3901dfe65f6e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1380.028567] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-104ce6b3-858b-457d-b84b-ebac7e7b6ce1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.035898] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1380.035898] env[68244]: value = "task-2781471" [ 1380.035898] env[68244]: _type = "Task" [ 1380.035898] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.043624] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781471, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.146583] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.371423] env[68244]: INFO nova.compute.manager [None req-64853bef-2cf9-423a-9484-d6590d3f0bf6 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance to original state: 'active' [ 1380.438789] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.439201] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8799ecc5-1ff3-4df0-b89d-7d5dcfc41925 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.448480] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1380.448480] env[68244]: value = "task-2781472" [ 1380.448480] env[68244]: _type = "Task" [ 1380.448480] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.457490] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781472, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.546455] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438153} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.546743] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] ee819188-5e97-4a5f-80a1-3901dfe65f6e/ee819188-5e97-4a5f-80a1-3901dfe65f6e.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.546967] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1380.547261] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2222efc4-a3ca-4d34-991a-38cb19330459 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.554253] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1380.554253] env[68244]: value = "task-2781473" [ 1380.554253] env[68244]: _type = "Task" [ 1380.554253] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.562894] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781473, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.651978] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1380.652270] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.210s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.959724] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781472, 'name': PowerOffVM_Task, 'duration_secs': 0.169189} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.960060] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1380.960182] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1381.064462] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063394} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.064725] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1381.065591] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5344c9dc-2780-47ec-9731-a84be806646b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.092529] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] ee819188-5e97-4a5f-80a1-3901dfe65f6e/ee819188-5e97-4a5f-80a1-3901dfe65f6e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1381.094231] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-424a621b-6e7b-4286-8a3a-8190fb65b490 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.125995] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1381.125995] env[68244]: value = "task-2781474" [ 1381.125995] env[68244]: _type = "Task" [ 1381.125995] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.135108] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781474, 'name': ReconfigVM_Task} progress is 6%. 
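Aside: the "Extending root virtual disk to 1048576" entry appears to be the flavor's 1 GB root disk expressed in KiB (1 GiB = 1,048,576 KiB) — the copied cache image is smaller than the flavor's root disk, so it is grown to the requested size before being attached via ReconfigVM_Task. A one-line arithmetic check, under that assumption:

```python
# Assumption: the extend target is root_gb converted to KiB.
def root_disk_kib(root_gb: int) -> int:
    return root_gb * 1024 * 1024


assert root_disk_kib(1) == 1048576   # matches the logged extend target
```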
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.466906] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1381.467187] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1381.467363] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1381.467547] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1381.467693] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1381.467840] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1381.468109] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1381.468296] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1381.468468] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1381.468628] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1381.468804] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1381.474060] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f66859dc-bc4a-4322-9297-f4b20daf66a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.490059] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1381.490059] env[68244]: value = "task-2781475" [ 1381.490059] env[68244]: _type = "Task" [ 1381.490059] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.497945] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781475, 'name': ReconfigVM_Task} progress is 5%. 
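Aside: the hardware.py lines above show topology selection for a 1-vCPU flavor with no flavor or image constraints — preferred 0:0:0, maxima 65536:65536:65536 — so the only possible result is sockets=1, cores=1, threads=1. The sketch below enumerates (sockets, cores, threads) factorizations under such limits; it is a simplified illustration, not Nova's actual algorithm.

```python
# Sketch of the enumeration the hardware.py DEBUG lines describe: for vcpus=1
# with effectively unlimited maxima, the only factorization is (1, 1, 1).
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found


assert possible_topologies(1) == [(1, 1, 1)]
```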
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.620640] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.620972] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.621152] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.621346] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.621521] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.623855] env[68244]: INFO nova.compute.manager [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Terminating instance [ 1381.635385] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781474, 'name': ReconfigVM_Task, 'duration_secs': 0.275063} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.636289] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Reconfigured VM instance instance-00000074 to attach disk [datastore2] ee819188-5e97-4a5f-80a1-3901dfe65f6e/ee819188-5e97-4a5f-80a1-3901dfe65f6e.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.636893] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-845c652a-2fcd-4f87-bfb9-d9eb7194ea72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.643721] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1381.643721] env[68244]: value = "task-2781476" [ 1381.643721] env[68244]: _type = "Task" [ 1381.643721] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.651838] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781476, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.652175] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.652362] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.652514] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.652666] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.652809] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.652987] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.653142] env[68244]: DEBUG nova.compute.manager 
[None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1381.999377] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781475, 'name': ReconfigVM_Task, 'duration_secs': 0.431489} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.999697] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1382.127546] env[68244]: DEBUG nova.compute.manager [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1382.127821] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1382.128756] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b087d3-473f-496f-a268-f25ea5a1a60c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.136345] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.136687] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99af6006-fe95-4f9a-a735-234dd63ba56f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.142524] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1382.142524] env[68244]: value = "task-2781477" [ 1382.142524] env[68244]: _type = "Task" [ 1382.142524] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.156061] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781477, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.159405] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781476, 'name': Rename_Task, 'duration_secs': 0.496538} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.159677] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.159930] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc06b9df-60be-45ba-8146-3e0b5e345f0d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.165022] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1382.165022] env[68244]: value = "task-2781478" [ 1382.165022] env[68244]: _type = "Task" [ 1382.165022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.173017] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781478, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.506250] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1382.506545] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.506765] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1382.507018] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.507222] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1382.507425] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1382.507683] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1382.507891] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1382.508131] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Got 1 possible topologies {{(pid=68244) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1382.508354] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1382.508582] env[68244]: DEBUG nova.virt.hardware [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1382.513985] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1382.514319] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f5e5532-a5a9-40b9-bcf2-0ff7196f7950 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.532948] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1382.532948] env[68244]: value = "task-2781479" [ 1382.532948] env[68244]: _type = "Task" [ 1382.532948] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.542028] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781479, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.651612] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781477, 'name': PowerOffVM_Task, 'duration_secs': 0.202186} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.651899] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1382.652082] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1382.652348] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27161c08-3d66-4fcf-9524-5054da4a201c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.673403] env[68244]: DEBUG oslo_vmware.api [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781478, 'name': PowerOnVM_Task, 'duration_secs': 0.433574} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.673656] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.673856] env[68244]: INFO nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Took 6.24 seconds to spawn the instance on the hypervisor. 
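Aside: pulling the ee819188-5e97-4a5f-80a1-3901dfe65f6e entries together, the spawn that just finished ("Took 6.24 seconds to spawn") is an ordered sequence of vCenter tasks as observed in this log: CreateVM, SearchDatastore (image-cache lookup), MakeDirectory, CopyVirtualDisk, ExtendVirtualDisk, ReconfigVM (attach the disk), Rename, PowerOn. The sketch below drives such a pipeline; `run_task` is a hypothetical callable that submits one task and blocks until it completes.

```python
# Sketch of the ordered task pipeline the spawn above walks through, as
# observed in this log. run_task(name) is a hypothetical callable that
# submits one vCenter task, blocks until it completes, and returns its
# duration in seconds.
SPAWN_PIPELINE = [
    "CreateVM_Task",
    "SearchDatastore_Task",     # image cache lookup
    "MakeDirectory",            # devstack-image-cache_base, if missing
    "CopyVirtualDisk_Task",     # cached VMDK -> instance VMDK
    "ExtendVirtualDisk_Task",   # grow root disk to the flavor size
    "ReconfigVM_Task",          # attach the disk to the VM
    "Rename_Task",
    "PowerOnVM_Task",
]


def spawn(run_task):
    """Run the pipeline in order; the total mirrors 'Took N seconds to spawn'."""
    total = 0.0
    for name in SPAWN_PIPELINE:
        total += run_task(name)
    return total


if __name__ == "__main__":
    import random
    print("spawned in %.2f seconds" % spawn(lambda name: random.uniform(0.05, 0.5)))
```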
[ 1382.674081] env[68244]: DEBUG nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1382.674827] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6513e8e4-a8c1-4b46-ab8c-6cdc2cc655aa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.715129] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1382.715459] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1382.715723] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleting the datastore file [datastore2] ae4d8900-3185-4747-ba8d-fe334d9e3237 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1382.716037] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fc81c4c-bca5-487e-b3db-214b33b99936 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.722517] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1382.722517] env[68244]: value = "task-2781482" [ 1382.722517] env[68244]: _type = "Task" [ 1382.722517] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.731023] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781482, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.044180] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781479, 'name': ReconfigVM_Task, 'duration_secs': 0.208832} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.044456] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1383.045218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e3fb1e-35fc-42ed-9f6c-9cb80c63acff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.066690] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6/volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.066956] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2717f712-e63f-44f1-b86d-f6b2dc9b1c87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.084339] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1383.084339] env[68244]: value = "task-2781483" [ 1383.084339] env[68244]: _type = "Task" [ 1383.084339] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.091749] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.192077] env[68244]: INFO nova.compute.manager [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Took 11.95 seconds to build instance. [ 1383.231669] env[68244]: DEBUG oslo_vmware.api [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781482, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156422} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.231990] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1383.232207] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1383.232390] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1383.232563] env[68244]: INFO nova.compute.manager [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1383.232795] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1383.233008] env[68244]: DEBUG nova.compute.manager [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1383.233118] env[68244]: DEBUG nova.network.neutron [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1383.595452] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781483, 'name': ReconfigVM_Task} progress is 14%. 
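Aside: the ae4d8900-3185-4747-ba8d-fe334d9e3237 teardown above follows a fixed order under the per-instance do_terminate_instance lock: power off, unregister the VM, delete its datastore directory, then deallocate the network. A minimal sketch of that serialization, where the step functions are hypothetical callables supplied by the caller:

```python
# Sketch of the teardown order the ae4d8900... entries follow; threading.Lock
# stands in for the oslo_concurrency instance lock seen in the log.
import threading

_instance_locks: dict[str, threading.Lock] = {}


def destroy_instance(uuid, power_off, unregister, delete_files, deallocate_network):
    """Serialize teardown per instance, mirroring the do_terminate_instance lock."""
    lock = _instance_locks.setdefault(uuid, threading.Lock())
    with lock:                    # Lock "<uuid>" acquired by do_terminate_instance
        power_off()               # PowerOffVM_Task
        unregister()              # VirtualMachine.UnregisterVM
        delete_files()            # FileManager.DeleteDatastoreFile_Task
        deallocate_network()      # neutron deallocate_for_instance()
```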
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.620074] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.671363] env[68244]: DEBUG nova.compute.manager [req-e5acf230-617d-4f1c-8dab-b97e8918509b req-e5fbb0ed-a2d0-4c3a-8a04-9bb7943d3add service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Received event network-vif-deleted-e1662b1e-6c27-4782-bc05-758a8a5c71f2 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1383.671597] env[68244]: INFO nova.compute.manager [req-e5acf230-617d-4f1c-8dab-b97e8918509b req-e5fbb0ed-a2d0-4c3a-8a04-9bb7943d3add service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Neutron deleted interface e1662b1e-6c27-4782-bc05-758a8a5c71f2; detaching it from the instance and deleting it from the info cache [ 1383.671816] env[68244]: DEBUG nova.network.neutron [req-e5acf230-617d-4f1c-8dab-b97e8918509b req-e5fbb0ed-a2d0-4c3a-8a04-9bb7943d3add service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.693901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-08fcfd76-9ba6-4332-9ca1-2b533cc1eca4 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.469s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.694299] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.074s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.694555] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.694798] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.695037] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.696958] env[68244]: INFO nova.compute.manager [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Terminating instance [ 1384.096519] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781483, 'name': ReconfigVM_Task, 'duration_secs': 0.7471} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.096812] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6/volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.097157] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1384.146504] env[68244]: DEBUG nova.network.neutron [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.174958] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3158b77-4415-4bf9-8c98-8d0ef708578d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.184768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5549865-304d-4fca-899e-6d460c0f2381 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.200432] env[68244]: DEBUG nova.compute.manager [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1384.200645] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1384.201613] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2738a1-75a2-420b-8b17-00931507ce9f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.213530] env[68244]: DEBUG nova.compute.manager [req-e5acf230-617d-4f1c-8dab-b97e8918509b req-e5fbb0ed-a2d0-4c3a-8a04-9bb7943d3add service nova] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Detach interface failed, port_id=e1662b1e-6c27-4782-bc05-758a8a5c71f2, reason: Instance ae4d8900-3185-4747-ba8d-fe334d9e3237 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1384.215733] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.215972] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7773ffa5-a634-43ca-8ffe-3b18c321ab27 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.222226] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1384.222226] env[68244]: value = "task-2781484" [ 1384.222226] env[68244]: _type = "Task" [ 1384.222226] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.230083] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781484, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.604086] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccacaa78-cb46-4ec7-ad68-7bb6ccca7103 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.624729] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e304f6-98db-4892-9323-34a1f9cfa56c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.641584] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1384.649067] env[68244]: INFO nova.compute.manager [-] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Took 1.42 seconds to deallocate network for instance. [ 1384.732090] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781484, 'name': PowerOffVM_Task, 'duration_secs': 0.169296} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.732359] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.732530] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1384.732764] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b30cb256-48fd-44bd-a011-40fd1882a250 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.793871] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1384.794108] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1384.794296] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] ee819188-5e97-4a5f-80a1-3901dfe65f6e {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.794555] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5511312c-4d97-442c-8654-e46f68b0cc51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.800895] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1384.800895] env[68244]: value = "task-2781486" [ 1384.800895] env[68244]: _type = "Task" [ 1384.800895] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.808695] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.155464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.155739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.155938] env[68244]: DEBUG nova.objects.instance [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'resources' on Instance uuid ae4d8900-3185-4747-ba8d-fe334d9e3237 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.310356] env[68244]: DEBUG oslo_vmware.api [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155924} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.310555] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.310728] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.310906] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.311114] env[68244]: INFO nova.compute.manager [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1385.311357] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1385.311551] env[68244]: DEBUG nova.compute.manager [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1385.311646] env[68244]: DEBUG nova.network.neutron [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.696300] env[68244]: DEBUG nova.compute.manager [req-24ba044a-9cf5-4d3d-93fc-b1df28e83ef6 req-77ee0e67-1ab7-4cd0-940f-1feac7e15945 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Received event network-vif-deleted-1992261e-0c11-4394-a63d-fbb8739d97f8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1385.696300] env[68244]: INFO nova.compute.manager [req-24ba044a-9cf5-4d3d-93fc-b1df28e83ef6 req-77ee0e67-1ab7-4cd0-940f-1feac7e15945 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Neutron deleted interface 1992261e-0c11-4394-a63d-fbb8739d97f8; detaching it from the instance and deleting it from the info cache [ 1385.696588] env[68244]: DEBUG nova.network.neutron [req-24ba044a-9cf5-4d3d-93fc-b1df28e83ef6 req-77ee0e67-1ab7-4cd0-940f-1feac7e15945 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.754228] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c35e95c-9e5a-4b1f-a4bb-03ec35330754 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.762533] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aea17cd-1819-4f43-8535-1f3357e9cf72 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.794929] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b72db2-610c-4a6b-af1d-660c2ef72d59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.802191] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a034148c-a579-4c99-9217-a4fc243bf18b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.815658] env[68244]: DEBUG nova.compute.provider_tree [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.056723] env[68244]: DEBUG nova.network.neutron [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.198573] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d760030-5e71-40b5-9cbb-3ebea1a3df8d {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.208420] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d4d679-fa56-46d5-acb5-ebb45843b82c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.236400] env[68244]: DEBUG nova.compute.manager [req-24ba044a-9cf5-4d3d-93fc-b1df28e83ef6 req-77ee0e67-1ab7-4cd0-940f-1feac7e15945 service nova] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Detach interface failed, port_id=1992261e-0c11-4394-a63d-fbb8739d97f8, reason: Instance ee819188-5e97-4a5f-80a1-3901dfe65f6e could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1386.273908] env[68244]: DEBUG nova.network.neutron [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Port cd5bf8e6-bd7d-473b-889f-4d23b1c887ab binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1386.319226] env[68244]: DEBUG nova.scheduler.client.report [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.560385] env[68244]: INFO nova.compute.manager [-] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Took 1.25 seconds to deallocate network for instance. 
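Editor's note: the scheduler report lines above repeatedly log the same inventory payload for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3. The schedulable capacity that placement derives from such a payload is (total - reserved) * allocation_ratio per resource class. The stand-alone sketch below (illustrative only, not Nova or placement code) reproduces that arithmetic for the exact values logged here.

```python
# Illustrative helper, not Nova/placement code: compute the effective
# schedulable capacity for each resource class from an inventory payload
# like the one logged by nova.scheduler.client.report above.
# Placement-style capacity is (total - reserved) * allocation_ratio.

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def effective_capacity(inv):
    """Return {resource_class: schedulable units} for an inventory dict."""
    return {
        rc: int((data['total'] - data['reserved']) * data['allocation_ratio'])
        for rc, data in inv.items()
    }


if __name__ == '__main__':
    for rc, cap in effective_capacity(inventory).items():
        print(f'{rc}: {cap}')
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```

Note that max_unit still caps any single allocation (one instance cannot claim more than 16 VCPU against this provider), independently of the aggregate capacity computed above.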
[ 1386.824257] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.843150] env[68244]: INFO nova.scheduler.client.report [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted allocations for instance ae4d8900-3185-4747-ba8d-fe334d9e3237 [ 1387.067435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.067728] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.067951] env[68244]: DEBUG nova.objects.instance [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'resources' on Instance uuid ee819188-5e97-4a5f-80a1-3901dfe65f6e {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1387.296605] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.296980] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.297063] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.351365] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ab3a38a3-1e31-4eb8-9a6e-5bc363549ea9 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "ae4d8900-3185-4747-ba8d-fe334d9e3237" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.730s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.651240] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2973e6-9a4c-4d12-a5a0-22770889028c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.659308] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c79a8d6-97bb-438b-9b87-b456b932650d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.689241] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74f0615-a2a5-4ccc-a75f-b857d559bb2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.696411] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd2e270-2ad0-4d40-b8b3-605aacd16ff6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.710568] env[68244]: DEBUG nova.compute.provider_tree [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.214850] env[68244]: DEBUG nova.scheduler.client.report [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1388.328577] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.328810] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1388.328941] env[68244]: DEBUG nova.network.neutron [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.612201] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "8aafc14e-418a-4c43-80b9-54da13550c32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1388.612556] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1388.718889] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.651s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.740599] env[68244]: INFO nova.scheduler.client.report [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance ee819188-5e97-4a5f-80a1-3901dfe65f6e [ 1389.115109] env[68244]: DEBUG nova.network.neutron [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.116572] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 
8aafc14e-418a-4c43-80b9-54da13550c32] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1389.247748] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e457bc00-d07a-4c91-b79b-3a04fddf8859 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "ee819188-5e97-4a5f-80a1-3901dfe65f6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.553s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.620730] env[68244]: DEBUG oslo_concurrency.lockutils [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1389.642737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.642737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.644361] env[68244]: INFO nova.compute.claims [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1390.131264] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70386640-efa6-42f0-aba8-c92b734ccd0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.138988] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14457ef-197b-4d6f-bec6-d267e384e19d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.643653] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.643949] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.741171] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e823485d-ee3d-4008-9cfa-d89415e27cf9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.748967] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5f3e02-b901-43fb-a4b6-dc907b615702 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.778095] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c263fd8a-7f73-42b4-a281-e0e86291e3ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.784954] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a228bb-8197-4c90-bed8-52fbfad6de64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.797584] env[68244]: DEBUG nova.compute.provider_tree [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.124964] env[68244]: INFO nova.compute.manager [None req-db610144-d284-4c35-913f-2c5c52e769ea tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Get console output [ 1391.124964] env[68244]: WARNING nova.virt.vmwareapi.driver [None req-db610144-d284-4c35-913f-2c5c52e769ea tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] The console log is missing. Check your VSPC configuration [ 1391.146490] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1391.244487] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3cc805-ffcb-4687-95bd-feecd9d906b6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.263863] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6398d4-6fd2-4d8c-9a49-0089dba03e6a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.270324] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1391.300019] env[68244]: DEBUG nova.scheduler.client.report [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.667803] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.777881] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.778215] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74ae429f-5f3e-42bf-93d4-639bbdb22c71 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.785983] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1391.785983] env[68244]: value = "task-2781488" [ 1391.785983] env[68244]: _type = "Task" [ 1391.785983] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.793230] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.804994] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.805527] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1391.808194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.141s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.809611] env[68244]: INFO nova.compute.claims [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1392.199033] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.201661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.201661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.201661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 
tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.201661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.204034] env[68244]: INFO nova.compute.manager [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Terminating instance [ 1392.296057] env[68244]: DEBUG oslo_vmware.api [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781488, 'name': PowerOnVM_Task, 'duration_secs': 0.37031} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.296057] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.296057] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6b62ce-98f3-4382-adae-b1c39449c000 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance '5519079a-d2a5-48c5-921c-199e0fc60aa3' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1392.313564] env[68244]: DEBUG nova.compute.utils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1392.316469] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1392.316728] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1392.358609] env[68244]: DEBUG nova.policy [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dd4fe2dbf154c1791b0bf2e9744629a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a16375181ca41fead00ee23bd2a9af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1392.624169] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Successfully created port: 843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.707464] env[68244]: DEBUG nova.compute.manager [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1392.707857] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.708690] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0874ca72-606b-4656-8513-68a8f9e20dc1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.718512] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.718760] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63505d64-2ee7-4d10-b648-fb47d9a9bae3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.725403] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1392.725403] env[68244]: value = "task-2781489" [ 1392.725403] env[68244]: _type = "Task" [ 1392.725403] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.733195] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.823295] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1392.934906] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89970f5-0185-43e0-b8c5-351965274207 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.942872] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0edb799-0977-43bc-902a-81c2173f09ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.973061] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42ec4b3-9c68-48c3-a3cd-5607abfa0fe6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.980130] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa778a10-da03-43df-b58c-c5bb5e1b314f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.992691] env[68244]: DEBUG nova.compute.provider_tree [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.236280] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781489, 'name': PowerOffVM_Task, 'duration_secs': 0.225888} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.236571] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.236760] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.237032] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-665d4a22-c96b-42c0-8c21-92ca043a191f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.320451] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.320698] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.320869] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleting the datastore file [datastore2] 77ba8e47-10bb-4630-bd89-067f5ad7bad9 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.321275] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68c5bfd0-ba8e-4714-be54-c5beb88c64e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.331667] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for the task: (returnval){ [ 1393.331667] env[68244]: value = "task-2781491" [ 1393.331667] env[68244]: _type = "Task" [ 1393.331667] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.342080] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781491, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.495774] env[68244]: DEBUG nova.scheduler.client.report [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1393.834363] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1393.845855] env[68244]: DEBUG oslo_vmware.api [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Task: {'id': task-2781491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133718} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.846041] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.846255] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1393.846255] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1393.846463] env[68244]: INFO nova.compute.manager [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1393.846690] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1393.846866] env[68244]: DEBUG nova.compute.manager [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1393.846958] env[68244]: DEBUG nova.network.neutron [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1393.868151] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1393.868790] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.868790] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1393.868790] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.868933] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1393.869039] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1393.869363] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1393.869466] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1393.869634] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1393.869796] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1393.869963] env[68244]: DEBUG nova.virt.hardware [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1393.870830] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3325d1-2a29-49ee-b155-a96999dddf51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.882785] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7cc747-1544-4194-b7dc-45692187987d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.983796] env[68244]: DEBUG nova.compute.manager [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Received event network-vif-plugged-843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1393.984046] env[68244]: DEBUG oslo_concurrency.lockutils [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] Acquiring lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.984316] env[68244]: DEBUG oslo_concurrency.lockutils [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] Lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.984493] env[68244]: DEBUG oslo_concurrency.lockutils [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] Lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1393.984726] env[68244]: DEBUG nova.compute.manager [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] No waiting events found dispatching network-vif-plugged-843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1393.984830] env[68244]: WARNING nova.compute.manager [req-5fead7c3-9918-4dfa-91e2-15ae01d91649 req-b38f829e-8467-455e-8f93-826bc81dae09 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Received unexpected event network-vif-plugged-843d0ee5-6e7f-4508-b91e-c871fd48ee83 for instance with vm_state building and task_state spawning. [ 1394.002680] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.002680] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1394.062388] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Successfully updated port: 843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.334692] env[68244]: DEBUG nova.compute.manager [req-04ad0598-d644-4b9a-9ed5-1866f7ef761c req-41fdfd3b-885e-4370-afbb-cce71bc15485 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Received event network-vif-deleted-42390128-dc00-4c43-bb63-04d49b817a2f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1394.336364] env[68244]: INFO nova.compute.manager [req-04ad0598-d644-4b9a-9ed5-1866f7ef761c req-41fdfd3b-885e-4370-afbb-cce71bc15485 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Neutron deleted interface 42390128-dc00-4c43-bb63-04d49b817a2f; detaching it from the instance and deleting it from the info cache [ 1394.336364] env[68244]: DEBUG nova.network.neutron [req-04ad0598-d644-4b9a-9ed5-1866f7ef761c req-41fdfd3b-885e-4370-afbb-cce71bc15485 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.507098] env[68244]: DEBUG nova.compute.utils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1394.508555] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1394.508736] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.570821] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.570821] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.570821] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.570821] env[68244]: DEBUG nova.policy [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1394.792290] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Successfully created port: 24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.818208] env[68244]: DEBUG nova.network.neutron [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.838782] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e152961d-a4ba-43de-ab22-cbe9a0c4679d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.848995] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccf6d2c-ab66-4011-9034-593116934973 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.875187] env[68244]: DEBUG nova.compute.manager 
[req-04ad0598-d644-4b9a-9ed5-1866f7ef761c req-41fdfd3b-885e-4370-afbb-cce71bc15485 service nova] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Detach interface failed, port_id=42390128-dc00-4c43-bb63-04d49b817a2f, reason: Instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1394.931293] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.931558] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.931746] env[68244]: DEBUG nova.compute.manager [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Going to confirm migration 9 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1395.012353] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1395.096442] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.212819] env[68244]: DEBUG nova.network.neutron [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.322279] env[68244]: INFO nova.compute.manager [-] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Took 1.48 seconds to deallocate network for instance. 
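The PowerOffVM_Task, DeleteDatastoreFile_Task and CreateVM_Task entries above all follow the same oslo.vmware pattern: the driver submits a vCenter task, then wait_for_task/_poll_task repeatedly reports progress until the task reaches a terminal state. The sketch below is only an illustration of that polling loop under assumed names (poll_fn, wait_for_task, TaskTimeout are hypothetical); it is not the oslo.vmware implementation.

    # Illustrative polling-loop sketch (hypothetical names), mirroring the
    # "progress is 0%" ... "completed successfully" records above.
    import time

    class TaskTimeout(Exception):
        pass

    def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
        """poll_fn() is assumed to return a dict such as
        {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None}."""
        deadline = time.monotonic() + timeout
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error') or 'task failed')
            if time.monotonic() > deadline:
                raise TaskTimeout('task did not complete in %.1fs' % timeout)
            # Comparable to the periodic _poll_task progress debug lines.
            print('progress is %d%%' % info.get('progress', 0))
            time.sleep(interval)

    if __name__ == '__main__':
        # Fake task that completes after three polls, to exercise the loop.
        calls = {'n': 0}
        def fake_poll():
            calls['n'] += 1
            done = calls['n'] >= 3
            return {'state': 'success' if done else 'running',
                    'progress': 100 if done else 0}
        print(wait_for_task(fake_poll, interval=0.01))
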
[ 1395.492543] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.492794] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquired lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.493101] env[68244]: DEBUG nova.network.neutron [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.493401] env[68244]: DEBUG nova.objects.instance [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'info_cache' on Instance uuid 5519079a-d2a5-48c5-921c-199e0fc60aa3 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.716112] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.716415] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Instance network_info: |[{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1395.716841] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 
tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:de:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92cdccfd-4b10-4024-b724-5f22792dd4de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '843d0ee5-6e7f-4508-b91e-c871fd48ee83', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1395.724321] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1395.724519] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1395.724742] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bd5849f-3330-443f-b4f1-1f4265068be9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.743568] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1395.743568] env[68244]: value = "task-2781492" [ 1395.743568] env[68244]: _type = "Task" [ 1395.743568] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.750481] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781492, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.829221] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.829441] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.829667] env[68244]: DEBUG nova.objects.instance [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lazy-loading 'resources' on Instance uuid 77ba8e47-10bb-4630-bd89-067f5ad7bad9 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1396.009212] env[68244]: DEBUG nova.compute.manager [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Received event network-changed-843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1396.009460] env[68244]: DEBUG nova.compute.manager [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Refreshing instance network info cache due to event network-changed-843d0ee5-6e7f-4508-b91e-c871fd48ee83. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1396.009602] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] Acquiring lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.009634] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] Acquired lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.009860] env[68244]: DEBUG nova.network.neutron [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Refreshing network info cache for port 843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.021642] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1396.048539] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1396.048772] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.048950] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1396.049145] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.049291] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1396.049436] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1396.049637] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1396.049860] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1396.049952] env[68244]: DEBUG nova.virt.hardware [None 
req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1396.050123] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1396.050299] env[68244]: DEBUG nova.virt.hardware [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1396.051204] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939e1233-be13-4283-bc64-b6e889b0a359 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.059612] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf7f862-1a09-4df4-96ce-84133e06530d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.250717] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Successfully updated port: 24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1396.258048] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781492, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.362420] env[68244]: DEBUG nova.compute.manager [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Received event network-vif-plugged-24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1396.362641] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.363300] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.363300] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.363300] env[68244]: DEBUG nova.compute.manager [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] No waiting events found dispatching network-vif-plugged-24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1396.363300] env[68244]: WARNING nova.compute.manager [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Received unexpected event network-vif-plugged-24acf6ef-dbe3-4894-ae35-9de68726bb13 for instance with vm_state building and task_state spawning. [ 1396.363515] env[68244]: DEBUG nova.compute.manager [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Received event network-changed-24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1396.363606] env[68244]: DEBUG nova.compute.manager [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Refreshing instance network info cache due to event network-changed-24acf6ef-dbe3-4894-ae35-9de68726bb13. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1396.363791] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Acquiring lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.363959] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Acquired lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.364141] env[68244]: DEBUG nova.network.neutron [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Refreshing network info cache for port 24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.424836] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e7b238-5fdd-444a-801e-2bbbf8f4017f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.432193] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38328ea-93c3-482d-af12-cd767c11fd5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.461109] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378211e1-5ca9-47d5-a0c6-60f47aac00e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.470358] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d46ea8-79a0-42ec-b863-928e2c5a209f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.492994] env[68244]: DEBUG nova.compute.provider_tree [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.689238] env[68244]: DEBUG nova.network.neutron [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updated VIF entry in instance network info cache for port 843d0ee5-6e7f-4508-b91e-c871fd48ee83. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.689679] env[68244]: DEBUG nova.network.neutron [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.724179] env[68244]: DEBUG nova.network.neutron [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [{"id": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "address": "fa:16:3e:79:b9:93", "network": {"id": "dc532440-82f4-463d-bc79-83ba43b7b95f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1424369771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a151f53070d94d08bf7e85617a6f5190", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd5bf8e6-bd", "ovs_interfaceid": "cd5bf8e6-bd7d-473b-889f-4d23b1c887ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.754856] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.755096] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781492, 'name': CreateVM_Task, 'duration_secs': 0.526784} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.755253] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1396.755901] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.756075] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.756417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1396.756662] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a095295-39d8-4267-bc45-fffe31923e9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.760990] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1396.760990] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9568f-3b5c-56ae-a1a7-6cc76e632be4" [ 1396.760990] env[68244]: _type = "Task" [ 1396.760990] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.768674] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9568f-3b5c-56ae-a1a7-6cc76e632be4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.898017] env[68244]: DEBUG nova.network.neutron [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.962513] env[68244]: DEBUG nova.network.neutron [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.997166] env[68244]: DEBUG nova.scheduler.client.report [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.194345] env[68244]: DEBUG oslo_concurrency.lockutils [req-4a6579c2-528b-42ac-8331-80d581c26e7c req-acb4e6a1-4a8a-4dfc-965f-adccc2a62376 service nova] Releasing lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.230660] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Releasing lock "refresh_cache-5519079a-d2a5-48c5-921c-199e0fc60aa3" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.230985] env[68244]: DEBUG nova.objects.instance [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'migration_context' on Instance uuid 5519079a-d2a5-48c5-921c-199e0fc60aa3 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.271246] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52e9568f-3b5c-56ae-a1a7-6cc76e632be4, 'name': SearchDatastore_Task, 'duration_secs': 0.011634} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.271509] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.271739] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.271966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.272127] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.272309] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.272801] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18aec422-5a5e-49a9-b089-efdc37d71c84 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.281087] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.281267] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1397.282014] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85d233a6-e0be-428d-bada-2feef625498f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.286505] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1397.286505] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d2f6d-6f9d-4cf4-d2a2-81018c96bbb6" [ 1397.286505] env[68244]: _type = "Task" [ 1397.286505] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.293492] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d2f6d-6f9d-4cf4-d2a2-81018c96bbb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.464916] env[68244]: DEBUG oslo_concurrency.lockutils [req-646c4a34-ec0f-4cfa-925d-b10a146d2291 req-0adbab84-713a-4842-a94a-807a39cadccc service nova] Releasing lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.465271] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.465459] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1397.501707] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.521277] env[68244]: INFO nova.scheduler.client.report [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Deleted allocations for instance 77ba8e47-10bb-4630-bd89-067f5ad7bad9 [ 1397.733834] env[68244]: DEBUG nova.objects.base [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Object Instance<5519079a-d2a5-48c5-921c-199e0fc60aa3> lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1397.734783] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0038af32-d785-4796-8e35-629ef4bfebeb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.754919] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcdb8519-9dbd-411d-8ff1-73913418cf94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.759668] env[68244]: DEBUG oslo_vmware.api [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1397.759668] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52559b88-6d00-699c-5431-22caf61d931f" [ 1397.759668] env[68244]: _type = "Task" [ 1397.759668] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.766833] env[68244]: DEBUG oslo_vmware.api [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52559b88-6d00-699c-5431-22caf61d931f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.794515] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527d2f6d-6f9d-4cf4-d2a2-81018c96bbb6, 'name': SearchDatastore_Task, 'duration_secs': 0.008724} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.795210] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35634d12-0455-49de-9956-b140b05b7ce2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.799486] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1397.799486] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bae14e-caf1-66fb-a6f5-23f8ec33fc93" [ 1397.799486] env[68244]: _type = "Task" [ 1397.799486] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.806294] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bae14e-caf1-66fb-a6f5-23f8ec33fc93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.994810] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1398.032818] env[68244]: DEBUG oslo_concurrency.lockutils [None req-99168ea2-5361-438d-9fb9-12767c3ebf73 tempest-ServerActionsTestOtherB-65371887 tempest-ServerActionsTestOtherB-65371887-project-member] Lock "77ba8e47-10bb-4630-bd89-067f5ad7bad9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.833s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.119949] env[68244]: DEBUG nova.network.neutron [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Updating instance_info_cache with network_info: [{"id": "24acf6ef-dbe3-4894-ae35-9de68726bb13", "address": "fa:16:3e:61:f5:a2", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24acf6ef-db", "ovs_interfaceid": "24acf6ef-dbe3-4894-ae35-9de68726bb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.270432] env[68244]: DEBUG oslo_vmware.api [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52559b88-6d00-699c-5431-22caf61d931f, 'name': SearchDatastore_Task, 'duration_secs': 0.006297} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.270811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.270931] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.309359] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bae14e-caf1-66fb-a6f5-23f8ec33fc93, 'name': SearchDatastore_Task, 'duration_secs': 0.007979} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.309633] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.309895] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8aafc14e-418a-4c43-80b9-54da13550c32/8aafc14e-418a-4c43-80b9-54da13550c32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.310643] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-889f58d1-90e4-40db-936a-4ff9fa0eb077 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.317308] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1398.317308] env[68244]: value = "task-2781493" [ 1398.317308] env[68244]: _type = "Task" [ 1398.317308] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.325714] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781493, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.622933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-b6e13d36-31e3-4d07-894e-cc540acdaf21" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.623253] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance network_info: |[{"id": "24acf6ef-dbe3-4894-ae35-9de68726bb13", "address": "fa:16:3e:61:f5:a2", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24acf6ef-db", "ovs_interfaceid": "24acf6ef-dbe3-4894-ae35-9de68726bb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1398.623716] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:f5:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24acf6ef-dbe3-4894-ae35-9de68726bb13', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.631314] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1398.631549] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.631781] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00a842a0-906c-4367-93a9-12deb08033ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.652782] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.652782] env[68244]: value = "task-2781495" [ 1398.652782] env[68244]: _type = "Task" [ 1398.652782] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.665114] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781495, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.827804] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416867} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.827804] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 8aafc14e-418a-4c43-80b9-54da13550c32/8aafc14e-418a-4c43-80b9-54da13550c32.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1398.828122] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1398.828198] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a00506a-ea5e-476f-a743-e468df64d15c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.836957] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1398.836957] env[68244]: value = "task-2781496" [ 1398.836957] env[68244]: _type = "Task" [ 1398.836957] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.845991] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781496, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.866509] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a28517-5eae-40bc-a51d-66184854954c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.873912] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b870f84a-502a-43ab-aa78-aed8c2b3fb57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.905840] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cc0720-5ffb-43ec-8e87-306e980cff51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.913737] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ce5e38-e3b2-4df1-a6e3-3ea3d297fc9a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.927592] env[68244]: DEBUG nova.compute.provider_tree [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.163455] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781495, 'name': CreateVM_Task, 'duration_secs': 0.318183} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.163644] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.164272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.164442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.164761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1399.165015] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5099a4c8-84ea-4794-828c-65468a38ed74 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1399.169483] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1399.169483] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528dab74-7d5e-3766-67e2-8def72f51edd" [ 1399.169483] env[68244]: _type = "Task" [ 1399.169483] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.177166] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528dab74-7d5e-3766-67e2-8def72f51edd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.347660] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069006} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.348075] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.348781] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a310ad54-b2de-4098-9933-93b9b1054d38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.370142] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 8aafc14e-418a-4c43-80b9-54da13550c32/8aafc14e-418a-4c43-80b9-54da13550c32.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.370399] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4afde1f-6643-4eaa-829a-343bfa337b24 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.393225] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1399.393225] env[68244]: value = "task-2781497" [ 1399.393225] env[68244]: _type = "Task" [ 1399.393225] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.401119] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781497, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.431568] env[68244]: DEBUG nova.scheduler.client.report [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1399.679643] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]528dab74-7d5e-3766-67e2-8def72f51edd, 'name': SearchDatastore_Task, 'duration_secs': 0.009501} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.679934] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.680198] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1399.680451] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.680600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.680778] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.681034] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-779992c0-dc7a-43d9-aabc-f8317437b498 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.688341] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.688515] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1399.689198] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b255755-27d5-4f7a-a433-5be5e554d001 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.695513] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1399.695513] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ea4c7a-0f6f-c0c3-202a-0d23ffdfc572" [ 1399.695513] env[68244]: _type = "Task" [ 1399.695513] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.702172] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ea4c7a-0f6f-c0c3-202a-0d23ffdfc572, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.902882] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781497, 'name': ReconfigVM_Task, 'duration_secs': 0.27123} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.903199] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 8aafc14e-418a-4c43-80b9-54da13550c32/8aafc14e-418a-4c43-80b9-54da13550c32.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.903828] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c29c803-7717-4b17-ae65-8bce05375668 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.910472] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1399.910472] env[68244]: value = "task-2781498" [ 1399.910472] env[68244]: _type = "Task" [ 1399.910472] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.917903] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781498, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.207242] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ea4c7a-0f6f-c0c3-202a-0d23ffdfc572, 'name': SearchDatastore_Task, 'duration_secs': 0.008319} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.207242] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83009b05-4e16-4740-b38a-55a8df43e83d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.212660] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1400.212660] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5222d7f6-7ce1-3f8e-ff14-bf6105e38fb5" [ 1400.212660] env[68244]: _type = "Task" [ 1400.212660] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.220119] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5222d7f6-7ce1-3f8e-ff14-bf6105e38fb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.419480] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781498, 'name': Rename_Task, 'duration_secs': 0.138398} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.419753] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.419984] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-227975eb-37f6-40ca-9d7a-1964836de16e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.427428] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1400.427428] env[68244]: value = "task-2781499" [ 1400.427428] env[68244]: _type = "Task" [ 1400.427428] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.434273] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.441894] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.171s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.722940] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5222d7f6-7ce1-3f8e-ff14-bf6105e38fb5, 'name': SearchDatastore_Task, 'duration_secs': 0.009656} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.723219] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.723487] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b6e13d36-31e3-4d07-894e-cc540acdaf21/b6e13d36-31e3-4d07-894e-cc540acdaf21.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1400.723769] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5db2809-6c48-43af-8458-6c5351d8cdaa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.730263] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1400.730263] env[68244]: value = "task-2781500" [ 1400.730263] env[68244]: _type = "Task" [ 1400.730263] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.738121] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.938592] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781499, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.008136] env[68244]: INFO nova.scheduler.client.report [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocation for migration c063d393-46a5-45f5-ab79-238d0c7f4655 [ 1401.240761] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444064} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.241305] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] b6e13d36-31e3-4d07-894e-cc540acdaf21/b6e13d36-31e3-4d07-894e-cc540acdaf21.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.241575] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.241909] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2679da48-fe34-4113-8f26-9170327cb42d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.249659] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1401.249659] env[68244]: value = "task-2781501" [ 1401.249659] env[68244]: _type = "Task" [ 1401.249659] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.257287] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781501, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.439097] env[68244]: DEBUG oslo_vmware.api [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781499, 'name': PowerOnVM_Task, 'duration_secs': 0.574328} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.439443] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.439711] env[68244]: INFO nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Took 7.61 seconds to spawn the instance on the hypervisor. 
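The spawn sequence above repeats one pattern for every vCenter operation: a task is started (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and the API layer polls it, logging "progress is N%" until it reports "completed successfully" together with a duration. The snippet below is a minimal, self-contained Python sketch of that poll-until-done loop; FakeTask, its states, and the printed messages are illustrative stand-ins chosen to mirror the log lines, not the actual oslo.vmware or vSphere API.

```python
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task (CopyVirtualDisk_Task, PowerOnVM_Task, ...)."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); the fake task finishes after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, poll_interval=0.1):
    """Poll a task until it succeeds, mimicking the 'progress is N%' lines above."""
    started = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "success":
            duration = time.monotonic() - started
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return
        print(f"Task {task.name} progress is {progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # The spawn path in the log chains several such tasks one after another.
    for name in ("CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
                 "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"):
        wait_for_task(FakeTask(name))
```

Chaining the tasks sequentially, as the sketch does, matches why the instance takes several seconds to spawn: each disk copy, extend, reconfigure, rename, and power-on must complete before the next begins.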
[ 1401.440044] env[68244]: DEBUG nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1401.440922] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d53798-8ff6-4083-a6fd-7cacd171ed16 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.514486] env[68244]: DEBUG oslo_concurrency.lockutils [None req-42a18fbf-3e60-4c23-b558-1707f0e90013 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.583s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.521211] env[68244]: INFO nova.compute.manager [None req-23293309-aa7b-4e5d-8880-91a9e853b1fc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Get console output [ 1401.521456] env[68244]: WARNING nova.virt.vmwareapi.driver [None req-23293309-aa7b-4e5d-8880-91a9e853b1fc tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] The console log is missing. Check your VSPC configuration [ 1401.759163] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781501, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065442} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.759680] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1401.760577] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab16d47-16b5-4ced-87b9-d1779534bb75 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.782498] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] b6e13d36-31e3-4d07-894e-cc540acdaf21/b6e13d36-31e3-4d07-894e-cc540acdaf21.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.783104] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a51f7b5f-837f-413b-be4b-158329319de3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.802326] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1401.802326] env[68244]: value = "task-2781502" [ 1401.802326] env[68244]: _type = "Task" [ 1401.802326] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.811016] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781502, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.958532] env[68244]: INFO nova.compute.manager [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Took 12.33 seconds to build instance. [ 1402.313364] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781502, 'name': ReconfigVM_Task, 'duration_secs': 0.295362} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.313690] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfigured VM instance instance-00000076 to attach disk [datastore2] b6e13d36-31e3-4d07-894e-cc540acdaf21/b6e13d36-31e3-4d07-894e-cc540acdaf21.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.314370] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c72e3715-d4dc-48e8-9713-f4e3090d0437 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.320827] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1402.320827] env[68244]: value = "task-2781503" [ 1402.320827] env[68244]: _type = "Task" [ 1402.320827] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.328952] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781503, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.465038] env[68244]: DEBUG oslo_concurrency.lockutils [None req-3b7c258b-4f9f-41e8-ae77-b8cbec5adc85 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.849s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.832599] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781503, 'name': Rename_Task, 'duration_secs': 0.142766} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.832877] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1402.833158] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-daa3c294-5c0b-433b-b7a4-febb8b1dda96 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.839618] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1402.839618] env[68244]: value = "task-2781504" [ 1402.839618] env[68244]: _type = "Task" [ 1402.839618] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.847338] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.982922] env[68244]: DEBUG nova.compute.manager [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Received event network-changed-843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1402.983048] env[68244]: DEBUG nova.compute.manager [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Refreshing instance network info cache due to event network-changed-843d0ee5-6e7f-4508-b91e-c871fd48ee83. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1402.983267] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] Acquiring lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.983418] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] Acquired lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.983580] env[68244]: DEBUG nova.network.neutron [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Refreshing network info cache for port 843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1403.352202] env[68244]: DEBUG oslo_vmware.api [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781504, 'name': PowerOnVM_Task, 'duration_secs': 0.439276} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.352510] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.352728] env[68244]: INFO nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Took 7.33 seconds to spawn the instance on the hypervisor. [ 1403.352914] env[68244]: DEBUG nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.354028] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336dd661-df3a-4660-95e1-d79cf5023b39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.880372] env[68244]: INFO nova.compute.manager [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Took 12.23 seconds to build instance. [ 1403.918965] env[68244]: DEBUG nova.network.neutron [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updated VIF entry in instance network info cache for port 843d0ee5-6e7f-4508-b91e-c871fd48ee83. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.919344] env[68244]: DEBUG nova.network.neutron [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.382844] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bc18cb6f-ed0d-4c52-97a4-cf84d43be1f2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.739s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.422594] env[68244]: DEBUG oslo_concurrency.lockutils [req-c9f32c55-35f9-4b3f-bee1-2baa59073809 req-e29f9b29-9854-4ebb-8c7d-cfcd2e7a3e6b service nova] Releasing lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.078101] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.078381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.582283] env[68244]: DEBUG nova.compute.utils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1407.085309] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.153460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.153788] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1408.153967] env[68244]: INFO nova.compute.manager [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Attaching volume 67396f7a-7573-4668-8b08-7360d29a966f to /dev/sdb [ 1408.183439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f7beb9-d507-4218-b8fa-8d8bf45270e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.190777] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a91065e-35a7-4d18-bec8-a6e40a141d9c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.203440] env[68244]: DEBUG nova.virt.block_device [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Updating existing volume attachment record: ae778c90-25ff-47f5-8ce0-c590f642c67c {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1413.247298] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1413.247627] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559201', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'name': 'volume-67396f7a-7573-4668-8b08-7360d29a966f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b6e13d36-31e3-4d07-894e-cc540acdaf21', 'attached_at': '', 'detached_at': '', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'serial': '67396f7a-7573-4668-8b08-7360d29a966f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1413.248606] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad16a99-27cf-4031-946c-a7c59b8381c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.267289] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e346486-778e-48cc-ac5a-3ffe7608350d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.292311] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] volume-67396f7a-7573-4668-8b08-7360d29a966f/volume-67396f7a-7573-4668-8b08-7360d29a966f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.292680] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c566cec-2655-4d81-92e7-d0b0ea9a9dbe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.311606] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1413.311606] env[68244]: value = "task-2781515" [ 1413.311606] env[68244]: _type = "Task" [ 1413.311606] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.319241] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.821885] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781515, 'name': ReconfigVM_Task, 'duration_secs': 0.353871} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.822247] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfigured VM instance instance-00000076 to attach disk [datastore2] volume-67396f7a-7573-4668-8b08-7360d29a966f/volume-67396f7a-7573-4668-8b08-7360d29a966f.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1413.827036] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a9f8f8b-e88b-4967-bd49-da1ca2204080 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.841556] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1413.841556] env[68244]: value = "task-2781516" [ 1413.841556] env[68244]: _type = "Task" [ 1413.841556] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.848840] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781516, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.351746] env[68244]: DEBUG oslo_vmware.api [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781516, 'name': ReconfigVM_Task, 'duration_secs': 0.13544} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.352057] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559201', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'name': 'volume-67396f7a-7573-4668-8b08-7360d29a966f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b6e13d36-31e3-4d07-894e-cc540acdaf21', 'attached_at': '', 'detached_at': '', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'serial': '67396f7a-7573-4668-8b08-7360d29a966f'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1415.386923] env[68244]: DEBUG nova.objects.instance [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'flavor' on Instance uuid b6e13d36-31e3-4d07-894e-cc540acdaf21 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.502662] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.894673] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9359d21c-e30b-4ff6-83e0-8c73a64d147b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.741s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1415.896031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.393s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.896331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.896572] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.896760] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1415.898610] env[68244]: INFO nova.compute.manager [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Terminating instance [ 1416.402179] env[68244]: DEBUG nova.compute.manager [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1416.402492] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1416.402675] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bb95ae4-d1dd-458b-ba59-a5f2b28eb0c3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.410444] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1416.410444] env[68244]: value = "task-2781518" [ 1416.410444] env[68244]: _type = "Task" [ 1416.410444] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.419374] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781518, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.921994] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781518, 'name': PowerOffVM_Task, 'duration_secs': 0.208706} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.922279] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1416.922477] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Volume detach. Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1416.922672] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559201', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'name': 'volume-67396f7a-7573-4668-8b08-7360d29a966f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b6e13d36-31e3-4d07-894e-cc540acdaf21', 'attached_at': '', 'detached_at': '', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'serial': '67396f7a-7573-4668-8b08-7360d29a966f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1416.923432] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909805ff-a117-46e0-9cb5-f300529797e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.943731] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984c751d-33b9-41ec-a00e-5af8a2f35560 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.950075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459505ff-2ed5-465b-a97f-9c6e8d9c3644 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.969602] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa63a57-ccb8-461d-ad03-4d35443d63fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.982893] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] The volume has not been displaced from its original location: [datastore2] volume-67396f7a-7573-4668-8b08-7360d29a966f/volume-67396f7a-7573-4668-8b08-7360d29a966f.vmdk. No consolidation needed. 
{{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1416.988099] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1416.988346] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2ea2e77-6048-4ca2-b9f7-af17314dc8ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.004712] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1417.004712] env[68244]: value = "task-2781519" [ 1417.004712] env[68244]: _type = "Task" [ 1417.004712] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.011511] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781519, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.514674] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781519, 'name': ReconfigVM_Task, 'duration_secs': 0.17724} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.515040] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1417.519568] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-085ed219-a60e-43af-8b3c-3bf4e01b61e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.534583] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1417.534583] env[68244]: value = "task-2781521" [ 1417.534583] env[68244]: _type = "Task" [ 1417.534583] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.542205] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781521, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.044245] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781521, 'name': ReconfigVM_Task, 'duration_secs': 0.161832} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.044538] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559201', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'name': 'volume-67396f7a-7573-4668-8b08-7360d29a966f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b6e13d36-31e3-4d07-894e-cc540acdaf21', 'attached_at': '', 'detached_at': '', 'volume_id': '67396f7a-7573-4668-8b08-7360d29a966f', 'serial': '67396f7a-7573-4668-8b08-7360d29a966f'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1418.044823] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.045606] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7497c0dd-8502-47eb-a189-4b2ab29fbafe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.051664] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1418.051882] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98e00a6a-516c-43ab-8143-5ba6e99beedb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.110358] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1418.110593] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1418.110740] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] b6e13d36-31e3-4d07-894e-cc540acdaf21 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1418.111011] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74d55f3e-b891-4d69-9e57-3292c53a6386 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.116211] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1418.116211] env[68244]: value = "task-2781523" [ 1418.116211] env[68244]: _type = "Task" [ 1418.116211] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.123253] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.627917] env[68244]: DEBUG oslo_vmware.api [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123414} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.628226] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1418.628331] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1418.628508] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1418.628682] env[68244]: INFO nova.compute.manager [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1418.628922] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1418.629135] env[68244]: DEBUG nova.compute.manager [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1418.629229] env[68244]: DEBUG nova.network.neutron [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1418.851497] env[68244]: DEBUG nova.compute.manager [req-06530f63-06ec-41df-aaea-1e0cae8ace59 req-d55b4215-e8f7-4679-ba30-0c130e8d72b0 service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Received event network-vif-deleted-24acf6ef-dbe3-4894-ae35-9de68726bb13 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1418.851734] env[68244]: INFO nova.compute.manager [req-06530f63-06ec-41df-aaea-1e0cae8ace59 req-d55b4215-e8f7-4679-ba30-0c130e8d72b0 service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Neutron deleted interface 24acf6ef-dbe3-4894-ae35-9de68726bb13; detaching it from the instance and deleting it from the info cache [ 1418.851890] env[68244]: DEBUG nova.network.neutron [req-06530f63-06ec-41df-aaea-1e0cae8ace59 req-d55b4215-e8f7-4679-ba30-0c130e8d72b0 service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.329885] env[68244]: DEBUG nova.network.neutron [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.355251] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d60a1386-f9d3-4bb2-ac30-fea7384fc5da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.364098] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d339c1b-c512-4624-bf03-555cb0d9c259 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.391324] env[68244]: DEBUG nova.compute.manager [req-06530f63-06ec-41df-aaea-1e0cae8ace59 req-d55b4215-e8f7-4679-ba30-0c130e8d72b0 service nova] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Detach interface failed, port_id=24acf6ef-dbe3-4894-ae35-9de68726bb13, reason: Instance b6e13d36-31e3-4d07-894e-cc540acdaf21 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1419.832052] env[68244]: INFO nova.compute.manager [-] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Took 1.20 seconds to deallocate network for instance. [ 1420.372412] env[68244]: INFO nova.compute.manager [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1420.878745] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1420.879111] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1420.879276] env[68244]: DEBUG nova.objects.instance [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'resources' on Instance uuid b6e13d36-31e3-4d07-894e-cc540acdaf21 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1421.453373] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9bfce2-8eca-4791-ac1b-a05c2142f21e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.460620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5cf782-7adb-4e08-b79c-65dbe0a7e847 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.493810] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a332ca91-2e37-4020-8712-ac3a7d9351e8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.501353] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a341012-30d7-4fa7-882a-ed7a5fec27b3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.514998] env[68244]: DEBUG nova.compute.provider_tree [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1422.018950] env[68244]: DEBUG nova.scheduler.client.report [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1422.524742] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b 
tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.547571] env[68244]: INFO nova.scheduler.client.report [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance b6e13d36-31e3-4d07-894e-cc540acdaf21 [ 1423.062395] env[68244]: DEBUG oslo_concurrency.lockutils [None req-fe70aa12-fc8c-4a05-ab8f-23d0f493453b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "b6e13d36-31e3-4d07-894e-cc540acdaf21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.166s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.171301] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1424.171653] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.549359] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1424.549583] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.673818] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1425.051810] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1425.198220] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1425.198504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1425.199976] env[68244]: INFO nova.compute.claims [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1425.573561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.282060] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1a8349-f73f-48b0-b62a-3427dc45fe56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.290143] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa24b94-6fa7-4e89-baa1-02a54075884e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.319628] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7003767e-a6e8-49dc-96ed-1ead1b441e5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.326976] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b91bbd-abc5-4d42-9406-b0e56eae9b0e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.340887] env[68244]: DEBUG nova.compute.provider_tree [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.844174] env[68244]: DEBUG 
nova.scheduler.client.report [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1427.349182] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1427.349677] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1427.352432] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.779s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1427.354247] env[68244]: INFO nova.compute.claims [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.859041] env[68244]: DEBUG nova.compute.utils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1427.861776] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1427.861967] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1427.903629] env[68244]: DEBUG nova.policy [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1428.199900] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Successfully created port: 3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1428.362819] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1428.448157] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75791b7-82c9-4aaa-9adf-c4a759445b5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.456100] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082bd65e-f2c3-4c1f-bdf6-0012f51b9212 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.486719] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.486954] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.487166] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.487343] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.487532] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1428.489833] env[68244]: INFO nova.compute.manager [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Terminating instance [ 1428.491538] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39d914c-6cca-4c97-8bf2-b47accbbb3d2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.500092] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78286c71-0e1c-4027-be78-f40dc966e6ff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.514342] env[68244]: DEBUG nova.compute.provider_tree [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.996104] env[68244]: DEBUG nova.compute.manager [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1428.996371] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.996661] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7a05dd0-b82e-4a3c-a8ea-69fd09b3f94d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.006727] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1429.006727] env[68244]: value = "task-2781526" [ 1429.006727] env[68244]: _type = "Task" [ 1429.006727] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.016604] env[68244]: DEBUG nova.scheduler.client.report [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1429.019643] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781526, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.375466] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1429.402592] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1429.402808] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1429.402965] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1429.403164] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1429.403312] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1429.403496] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1429.403796] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1429.403897] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1429.404076] env[68244]: DEBUG nova.virt.hardware [None 
req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1429.404242] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1429.404416] env[68244]: DEBUG nova.virt.hardware [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1429.405312] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cee608-1f1b-4052-9b71-3078ab490aa2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.413284] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a15a928-a21c-48e1-8f0e-1df553bafd50 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.516153] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781526, 'name': PowerOffVM_Task, 'duration_secs': 0.222804} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.516416] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1429.516615] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1429.516803] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559189', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'name': 'volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5519079a-d2a5-48c5-921c-199e0fc60aa3', 'attached_at': '2025-03-06T03:33:19.000000', 'detached_at': '', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'serial': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1429.517570] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0589549b-bb71-4be9-83dd-a95d980b31f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.520609] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.520947] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Start building networks asynchronously for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1429.536923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c124100-549d-4aef-bf6b-38144384090f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.558135] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58be932e-74e7-4e56-a4e0-b93cb51fc259 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.579716] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81871d4-324d-4407-bebe-6164604c394b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.583168] env[68244]: DEBUG nova.compute.manager [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received event network-vif-plugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1429.583375] env[68244]: DEBUG oslo_concurrency.lockutils [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.583607] env[68244]: DEBUG oslo_concurrency.lockutils [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.583776] env[68244]: DEBUG oslo_concurrency.lockutils [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.583940] env[68244]: DEBUG nova.compute.manager [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] No waiting events found dispatching network-vif-plugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1429.584118] env[68244]: WARNING nova.compute.manager [req-26f86454-c9fb-458c-b633-e6bf944c7651 req-25269b13-d750-4a26-806e-f51c7f238d6c service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received unexpected event network-vif-plugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a for instance with vm_state building and task_state spawning. 
[ 1429.597875] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] The volume has not been displaced from its original location: [datastore2] volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6/volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1429.603038] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1429.603335] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca559645-0f0b-4b70-94ea-fe9596db4b4a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.621812] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1429.621812] env[68244]: value = "task-2781527" [ 1429.621812] env[68244]: _type = "Task" [ 1429.621812] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.629843] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.660066] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Successfully updated port: 3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1430.038546] env[68244]: DEBUG nova.compute.utils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1430.040018] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Allocating IP information in the background. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1430.040161] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1430.075339] env[68244]: DEBUG nova.policy [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '057b0be3ed9646b781ee053ff2fabbb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '711ad4e6a1fb410299cce4337f11c050', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1430.131962] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781527, 'name': ReconfigVM_Task, 'duration_secs': 0.1674} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.132247] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1430.136932] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-678b3652-7471-4212-b1b2-b48540ec6754 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.152915] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1430.152915] env[68244]: value = "task-2781528" [ 1430.152915] env[68244]: _type = "Task" [ 1430.152915] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.163222] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.163442] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1430.163578] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.164827] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.421495] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Successfully created port: b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1430.545412] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1430.663578] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781528, 'name': ReconfigVM_Task, 'duration_secs': 0.139121} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.663888] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559189', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'name': 'volume-e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5519079a-d2a5-48c5-921c-199e0fc60aa3', 'attached_at': '2025-03-06T03:33:19.000000', 'detached_at': '', 'volume_id': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6', 'serial': 'e6180bf6-5d7f-41f5-a61b-f7d3249193c6'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1430.664186] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.664930] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06642ea-cea0-4d3c-9554-925612cc452d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.673391] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.673562] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7a7be0d-bf94-447d-84e7-1bd91070652d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.695831] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1430.741545] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1430.741761] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1430.741956] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] 5519079a-d2a5-48c5-921c-199e0fc60aa3 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.742214] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b72ac51f-4f9a-4638-bd7a-d4cc856bc293 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.750935] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1430.750935] env[68244]: value = "task-2781530" [ 1430.750935] env[68244]: _type = "Task" [ 1430.750935] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.758670] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781530, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.817961] env[68244]: DEBUG nova.network.neutron [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updating instance_info_cache with network_info: [{"id": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "address": "fa:16:3e:f7:4c:df", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8c373d-1e", "ovs_interfaceid": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.049954] env[68244]: INFO nova.virt.block_device [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Booting with volume f73eeacc-040a-4905-bf66-efaacecff4fe at /dev/sda [ 1431.085175] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c0a8ec9-b4dc-4766-9317-48a846a0cd37 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.096089] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66905bf-8837-4e60-869f-068fed1aef3e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.124810] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f286ecfa-5498-49f6-bf93-6cdb6ffc0659 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.133204] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd81e9f-d7c8-43c9-bceb-257dfb05f30f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.162196] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7d94b0-a000-4413-90ab-dd1e47bfe9c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.169878] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00077e6b-91da-454a-a21a-f4b87985692d {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.182993] env[68244]: DEBUG nova.virt.block_device [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating existing volume attachment record: 890efb68-f082-4a1c-8d6a-fb75b83487cb {{(pid=68244) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1431.261188] env[68244]: DEBUG oslo_vmware.api [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084369} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.261559] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.261830] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.262037] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.262233] env[68244]: INFO nova.compute.manager [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1431.262475] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1431.262686] env[68244]: DEBUG nova.compute.manager [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1431.262779] env[68244]: DEBUG nova.network.neutron [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.320170] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1431.320511] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance network_info: |[{"id": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "address": "fa:16:3e:f7:4c:df", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8c373d-1e", "ovs_interfaceid": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1431.320879] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:4c:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e8c373d-1ea2-4964-876e-fe7a4a60a83a', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1431.328397] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1431.328587] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1431.328817] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-348138ed-b5a2-42e8-bef5-68bf97e4080a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.348684] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1431.348684] env[68244]: value = "task-2781531" [ 1431.348684] env[68244]: _type = "Task" [ 1431.348684] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.356554] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781531, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.603747] env[68244]: DEBUG nova.compute.manager [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received event network-changed-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1431.604182] env[68244]: DEBUG nova.compute.manager [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Refreshing instance network info cache due to event network-changed-3e8c373d-1ea2-4964-876e-fe7a4a60a83a. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1431.604569] env[68244]: DEBUG oslo_concurrency.lockutils [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] Acquiring lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.604874] env[68244]: DEBUG oslo_concurrency.lockutils [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] Acquired lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1431.605236] env[68244]: DEBUG nova.network.neutron [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Refreshing network info cache for port 3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1431.859390] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781531, 'name': CreateVM_Task, 'duration_secs': 0.317445} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.859599] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1431.860307] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.860727] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1431.860877] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1431.861174] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe62d209-2da7-43fd-85b2-dc0e00858107 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.866101] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1431.866101] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cca925-405f-486b-8e6b-231f4dbf2df9" [ 1431.866101] env[68244]: _type = "Task" [ 1431.866101] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.875110] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cca925-405f-486b-8e6b-231f4dbf2df9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.889665] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Successfully updated port: b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1431.929117] env[68244]: DEBUG nova.compute.manager [req-8f93393f-3599-44cb-afa5-d41f6bccee73 req-6e0926da-25c1-43cf-aeaa-491fd631a5e0 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Received event network-vif-deleted-cd5bf8e6-bd7d-473b-889f-4d23b1c887ab {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1431.929770] env[68244]: INFO nova.compute.manager [req-8f93393f-3599-44cb-afa5-d41f6bccee73 req-6e0926da-25c1-43cf-aeaa-491fd631a5e0 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Neutron deleted interface cd5bf8e6-bd7d-473b-889f-4d23b1c887ab; detaching it from the instance and deleting it from the info cache [ 1431.929974] env[68244]: DEBUG nova.network.neutron [req-8f93393f-3599-44cb-afa5-d41f6bccee73 req-6e0926da-25c1-43cf-aeaa-491fd631a5e0 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.294825] env[68244]: DEBUG nova.network.neutron [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updated VIF entry in instance network info cache for port 3e8c373d-1ea2-4964-876e-fe7a4a60a83a. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1432.295193] env[68244]: DEBUG nova.network.neutron [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updating instance_info_cache with network_info: [{"id": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "address": "fa:16:3e:f7:4c:df", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8c373d-1e", "ovs_interfaceid": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.377334] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52cca925-405f-486b-8e6b-231f4dbf2df9, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.377578] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.377811] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1432.378057] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.378208] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1432.378386] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1432.378664] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83d42ba7-772d-4a55-8f18-66b5d2fe61bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.387043] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1432.387220] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1432.387893] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d68f8e6-9b29-4b7e-bdb7-2a1a024f6dd9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.390175] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.390306] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquired lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1432.390453] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1432.394868] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1432.394868] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a97c0-4553-2353-e93d-a8453e22a371" [ 1432.394868] env[68244]: _type = "Task" [ 1432.394868] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.403931] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a97c0-4553-2353-e93d-a8453e22a371, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.408566] env[68244]: DEBUG nova.network.neutron [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.432767] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc1afa07-9b44-48f4-badf-668087348732 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.443174] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a232d0bd-51ad-421b-bbee-f50b73bbfc53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.472900] env[68244]: DEBUG nova.compute.manager [req-8f93393f-3599-44cb-afa5-d41f6bccee73 req-6e0926da-25c1-43cf-aeaa-491fd631a5e0 service nova] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Detach interface failed, port_id=cd5bf8e6-bd7d-473b-889f-4d23b1c887ab, reason: Instance 5519079a-d2a5-48c5-921c-199e0fc60aa3 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1432.797826] env[68244]: DEBUG oslo_concurrency.lockutils [req-3433b4c0-d936-44c9-9e24-30f4d6db6a5c req-11903108-7bf5-41bb-abf8-9ff75045ee91 service nova] Releasing lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.906026] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525a97c0-4553-2353-e93d-a8453e22a371, 'name': SearchDatastore_Task, 'duration_secs': 0.009337} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.906736] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f25a7dd-a1b0-474f-b398-2445429f1166 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.910404] env[68244]: INFO nova.compute.manager [-] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Took 1.65 seconds to deallocate network for instance. [ 1432.913995] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1432.913995] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529959cb-66e5-c81f-f29d-5aa235a65794" [ 1432.913995] env[68244]: _type = "Task" [ 1432.913995] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.922489] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529959cb-66e5-c81f-f29d-5aa235a65794, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.923231] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1433.053013] env[68244]: DEBUG nova.network.neutron [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating instance_info_cache with network_info: [{"id": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "address": "fa:16:3e:9a:60:e8", "network": {"id": "952ccf2f-4044-4955-bad2-b56c7be041f4", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1244966615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711ad4e6a1fb410299cce4337f11c050", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb82deb5a-5d", "ovs_interfaceid": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.078418] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.272115] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1433.272734] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1433.273222] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1433.273222] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1433.273494] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1433.273549] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1433.273700] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1433.273911] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1433.274078] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1433.274248] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Got 1 possible topologies 
{{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1433.274422] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1433.274601] env[68244]: DEBUG nova.virt.hardware [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1433.276090] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb61e45e-adb4-4e24-b0af-1a36ca82ed7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.284766] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7a0d93-28a2-4fa3-b09c-8879fc25c0a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.425900] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]529959cb-66e5-c81f-f29d-5aa235a65794, 'name': SearchDatastore_Task, 'duration_secs': 0.010125} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.426190] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.426499] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d082450a-b0ab-4a54-bc0c-55541cb71fae/d082450a-b0ab-4a54-bc0c-55541cb71fae.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1433.426770] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4448b392-8eb9-4775-b366-bec40980f01b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.434260] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1433.434260] env[68244]: value = "task-2781532" [ 1433.434260] env[68244]: _type = "Task" [ 1433.434260] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.442469] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.456719] env[68244]: INFO nova.compute.manager [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Took 0.55 seconds to detach 1 volumes for instance. [ 1433.459194] env[68244]: DEBUG nova.compute.manager [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Deleting volume: e6180bf6-5d7f-41f5-a61b-f7d3249193c6 {{(pid=68244) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1433.555828] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Releasing lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.556221] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance network_info: |[{"id": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "address": "fa:16:3e:9a:60:e8", "network": {"id": "952ccf2f-4044-4955-bad2-b56c7be041f4", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1244966615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711ad4e6a1fb410299cce4337f11c050", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb82deb5a-5d", "ovs_interfaceid": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1433.556682] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:60:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721e64ee-fc02-4eb5-9c8c-ea55647a1b92', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'b82deb5a-5d45-48f8-977f-9e51a4ee39c9', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1433.565586] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Creating folder: Project (711ad4e6a1fb410299cce4337f11c050). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1433.566373] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77c153a3-dcf5-47eb-8604-676097dbcd49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.580843] env[68244]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1433.581071] env[68244]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68244) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1433.581502] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Folder already exists: Project (711ad4e6a1fb410299cce4337f11c050). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1433.581784] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Creating folder: Instances. Parent ref: group-v559197. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1433.582072] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9c7bfd6-f831-4fa3-9082-ffa0a8e93a60 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.592226] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Created folder: Instances in parent group-v559197. [ 1433.592615] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1433.592868] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1433.593152] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ddb3d7c-59c8-4de4-ba76-90445610b81b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.615180] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1433.615180] env[68244]: value = "task-2781536" [ 1433.615180] env[68244]: _type = "Task" [ 1433.615180] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.624765] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781536, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.632375] env[68244]: DEBUG nova.compute.manager [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Received event network-vif-plugged-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1433.632726] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Acquiring lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1433.632817] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.632992] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.633115] env[68244]: DEBUG nova.compute.manager [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] No waiting events found dispatching network-vif-plugged-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1433.633280] env[68244]: WARNING nova.compute.manager [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Received unexpected event network-vif-plugged-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 for instance with vm_state building and task_state spawning. [ 1433.633446] env[68244]: DEBUG nova.compute.manager [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Received event network-changed-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1433.633612] env[68244]: DEBUG nova.compute.manager [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Refreshing instance network info cache due to event network-changed-b82deb5a-5d45-48f8-977f-9e51a4ee39c9. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1433.633856] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Acquiring lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.633919] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Acquired lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1433.634073] env[68244]: DEBUG nova.network.neutron [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Refreshing network info cache for port b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1433.946827] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781532, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.000304] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.000604] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.000802] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.026902] env[68244]: INFO nova.scheduler.client.report [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocations for instance 5519079a-d2a5-48c5-921c-199e0fc60aa3 [ 1434.125220] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781536, 'name': CreateVM_Task, 'duration_secs': 0.49199} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.125448] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1434.126112] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'boot_index': 0, 'guest_format': None, 'attachment_id': '890efb68-f082-4a1c-8d6a-fb75b83487cb', 'disk_bus': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559202', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'name': 'volume-f73eeacc-040a-4905-bf66-efaacecff4fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff690eea-6e5a-42a3-bf85-1b844425df2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'serial': 'f73eeacc-040a-4905-bf66-efaacecff4fe'}, 'volume_type': None}], 'swap': None} {{(pid=68244) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1434.126339] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Root volume attach. Driver type: vmdk {{(pid=68244) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1434.127736] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f401a0e0-8de2-4ae2-9c9e-8300302e95c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.135517] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c031b5-b95f-4887-a58d-9e479f4db718 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.143347] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebebf5cc-dc37-4124-99ba-c0e1cee685ca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.152534] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-9269a92a-b546-4060-b3aa-0cb7a5194702 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.159553] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1434.159553] env[68244]: value = "task-2781537" [ 1434.159553] env[68244]: _type = "Task" [ 1434.159553] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.167720] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781537, 'name': RelocateVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.337345] env[68244]: DEBUG nova.network.neutron [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updated VIF entry in instance network info cache for port b82deb5a-5d45-48f8-977f-9e51a4ee39c9. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1434.337746] env[68244]: DEBUG nova.network.neutron [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating instance_info_cache with network_info: [{"id": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "address": "fa:16:3e:9a:60:e8", "network": {"id": "952ccf2f-4044-4955-bad2-b56c7be041f4", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1244966615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711ad4e6a1fb410299cce4337f11c050", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb82deb5a-5d", "ovs_interfaceid": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.446143] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565498} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.446143] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] d082450a-b0ab-4a54-bc0c-55541cb71fae/d082450a-b0ab-4a54-bc0c-55541cb71fae.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1434.446405] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1434.446569] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-822c1f4e-dc65-422d-8a3c-01b5173957ce {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.454992] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1434.454992] env[68244]: value = "task-2781538" [ 1434.454992] env[68244]: _type = "Task" [ 1434.454992] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.467446] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781538, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.534291] env[68244]: DEBUG oslo_concurrency.lockutils [None req-83b5ffa5-bb50-4a96-a3f7-3c4056111e57 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "5519079a-d2a5-48c5-921c-199e0fc60aa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.047s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.671152] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781537, 'name': RelocateVM_Task} progress is 19%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.840916] env[68244]: DEBUG oslo_concurrency.lockutils [req-32484a14-27f3-4fe3-8ea7-99ca7fdefcd0 req-d3246b0b-8662-4f96-a87b-a94a884c8f40 service nova] Releasing lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1434.965964] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781538, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112337} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.968175] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1434.968175] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497843b2-0a90-447d-ade2-df4c77b11787 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.989450] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] d082450a-b0ab-4a54-bc0c-55541cb71fae/d082450a-b0ab-4a54-bc0c-55541cb71fae.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1434.989723] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-644cc645-8a2c-485c-aaf4-a3671e8f7a51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.011063] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1435.011063] env[68244]: value = "task-2781539" [ 1435.011063] env[68244]: _type = "Task" [ 1435.011063] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.019017] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781539, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.160587] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.160959] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.161233] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.161479] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.161632] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.166880] env[68244]: INFO nova.compute.manager [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Terminating instance [ 1435.173795] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781537, 'name': RelocateVM_Task, 'duration_secs': 0.934281} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.174198] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Volume attach. 
Driver type: vmdk {{(pid=68244) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1435.174316] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559202', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'name': 'volume-f73eeacc-040a-4905-bf66-efaacecff4fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff690eea-6e5a-42a3-bf85-1b844425df2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'serial': 'f73eeacc-040a-4905-bf66-efaacecff4fe'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1435.175213] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3ab4c8-8d2e-4a2c-9976-9a3e795b6126 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.192768] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21494689-a470-4ed4-b534-e64d69b04cd7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.215378] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] volume-f73eeacc-040a-4905-bf66-efaacecff4fe/volume-f73eeacc-040a-4905-bf66-efaacecff4fe.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.215634] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de236e9f-13bf-49f1-8411-3f61e529e4b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.235609] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1435.235609] env[68244]: value = "task-2781540" [ 1435.235609] env[68244]: _type = "Task" [ 1435.235609] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.243372] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.521057] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781539, 'name': ReconfigVM_Task, 'duration_secs': 0.294982} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.521343] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Reconfigured VM instance instance-00000077 to attach disk [datastore2] d082450a-b0ab-4a54-bc0c-55541cb71fae/d082450a-b0ab-4a54-bc0c-55541cb71fae.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.521998] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70721cc6-9275-4f89-a7c1-3a599dd391b5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.528201] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1435.528201] env[68244]: value = "task-2781541" [ 1435.528201] env[68244]: _type = "Task" [ 1435.528201] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.535710] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781541, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.671200] env[68244]: DEBUG nova.compute.manager [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1435.671554] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1435.672586] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32471630-bef6-48f5-874a-cc98ab7a2d70 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.680608] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.680880] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7277da0b-359f-4309-8221-70f252667a7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.687150] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1435.687150] env[68244]: value = "task-2781542" [ 1435.687150] env[68244]: _type = "Task" [ 1435.687150] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.695400] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.746113] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781540, 'name': ReconfigVM_Task, 'duration_secs': 0.264191} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.746430] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Reconfigured VM instance instance-00000078 to attach disk [datastore2] volume-f73eeacc-040a-4905-bf66-efaacecff4fe/volume-f73eeacc-040a-4905-bf66-efaacecff4fe.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1435.751175] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a33308d4-7a89-4257-b754-359bc0ad0f5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.767999] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1435.767999] env[68244]: value = "task-2781543" [ 1435.767999] env[68244]: _type = "Task" [ 1435.767999] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.776501] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781543, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.037937] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781541, 'name': Rename_Task, 'duration_secs': 0.15449} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.038425] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.038510] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddfbca42-6bb5-4b67-93c3-3991858f1470 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.045174] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1436.045174] env[68244]: value = "task-2781544" [ 1436.045174] env[68244]: _type = "Task" [ 1436.045174] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.054791] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781544, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.197426] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781542, 'name': PowerOffVM_Task, 'duration_secs': 0.195669} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.197691] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.197850] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1436.198112] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c031c9b8-c2f9-477b-a146-33517356018c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.265156] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1436.265343] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1436.265530] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1436.265817] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b989bb4d-1bbe-44f9-b2b7-5166d83ae93f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.273413] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1436.273413] env[68244]: value = "task-2781546" [ 1436.273413] env[68244]: _type = "Task" [ 1436.273413] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.276639] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781543, 'name': ReconfigVM_Task, 'duration_secs': 0.130688} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.279693] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559202', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'name': 'volume-f73eeacc-040a-4905-bf66-efaacecff4fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff690eea-6e5a-42a3-bf85-1b844425df2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'serial': 'f73eeacc-040a-4905-bf66-efaacecff4fe'} {{(pid=68244) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1436.280198] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e17a2ca-7add-4261-b4d7-c36d7789e079 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.286169] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.287323] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1436.287323] env[68244]: value = "task-2781547" [ 1436.287323] env[68244]: _type = "Task" [ 1436.287323] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.294675] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781547, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.555759] env[68244]: DEBUG oslo_vmware.api [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781544, 'name': PowerOnVM_Task, 'duration_secs': 0.482948} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.556116] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1436.556253] env[68244]: INFO nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Took 7.18 seconds to spawn the instance on the hypervisor. [ 1436.556418] env[68244]: DEBUG nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1436.557169] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c21e94-d087-47c2-b6b1-038a87a914c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.787416] env[68244]: DEBUG oslo_vmware.api [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181218} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.787517] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1436.787652] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1436.787835] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1436.788034] env[68244]: INFO nova.compute.manager [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1436.788344] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1436.788607] env[68244]: DEBUG nova.compute.manager [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1436.788704] env[68244]: DEBUG nova.network.neutron [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1436.798720] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781547, 'name': Rename_Task, 'duration_secs': 0.131171} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.798967] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.799215] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a618f20-a2ec-4d7b-b958-80ff693132a6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.805931] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1436.805931] env[68244]: value = "task-2781548" [ 1436.805931] env[68244]: _type = "Task" [ 1436.805931] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.814168] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781548, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.048665] env[68244]: DEBUG nova.compute.manager [req-ec5a4a6b-88c4-4694-b550-940bdd5dad9f req-7ad02380-a205-4e63-a666-73d698ffffd8 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Received event network-vif-deleted-53c89888-9d51-40ce-af06-fc64566eb47d {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1437.049216] env[68244]: INFO nova.compute.manager [req-ec5a4a6b-88c4-4694-b550-940bdd5dad9f req-7ad02380-a205-4e63-a666-73d698ffffd8 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Neutron deleted interface 53c89888-9d51-40ce-af06-fc64566eb47d; detaching it from the instance and deleting it from the info cache [ 1437.049216] env[68244]: DEBUG nova.network.neutron [req-ec5a4a6b-88c4-4694-b550-940bdd5dad9f req-7ad02380-a205-4e63-a666-73d698ffffd8 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.073140] env[68244]: INFO nova.compute.manager [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Took 11.89 seconds to build instance. [ 1437.077689] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.319093] env[68244]: DEBUG oslo_vmware.api [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781548, 'name': PowerOnVM_Task, 'duration_secs': 0.500132} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.319283] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.319489] env[68244]: INFO nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Took 4.05 seconds to spawn the instance on the hypervisor. 
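The entries above follow the oslo.vmware request/poll pattern that recurs throughout this excerpt: the driver invokes a vSphere *_Task method (CopyVirtualDisk_Task, CreateVM_Task, RelocateVM_Task, ReconfigVM_Task, PowerOnVM_Task, ...), receives a task reference back, and wait_for_task polls it until completion, which is what the repeated "progress is N%" and "completed successfully" lines record. A minimal sketch of that pattern using the public oslo.vmware API follows; the connection values and the VM managed-object reference are hypothetical placeholders, not values taken from this log.

    # Sketch of the invoke-task / wait-for-task pattern seen in the log above.
    # Assumes a reachable vCenter; all credentials and morefs are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Session construction mirrors what the VMware driver does at startup.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',         # vCenter host (placeholder)
        'administrator@vsphere.local',  # username (placeholder)
        'secret',                       # password (placeholder)
        10,                             # api_retry_count
        0.5)                            # task_poll_interval, in seconds

    # Build a managed-object reference for a VM; 'vm-12345' is a placeholder.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off an asynchronous vSphere task and block until it finishes.
    # wait_for_task polls the task object (the "progress is N%" entries above)
    # and raises if the task ends in an error state.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)              # e.g. "success"

The same shape applies to the disk-copy, extend, and reconfigure tasks earlier in the excerpt; only the invoked method name and its arguments change.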
[ 1437.319700] env[68244]: DEBUG nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.320538] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3fb81f-edcb-4483-964c-879b04c823a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.531766] env[68244]: DEBUG nova.network.neutron [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.551881] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9476b1b6-bda6-4402-bc43-9d5e5366dad3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.562582] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd413546-d9e6-4d80-8735-6018e30c874e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.574964] env[68244]: DEBUG oslo_concurrency.lockutils [None req-e2fb557d-ffbf-407d-a728-532a91c9f8c2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.403s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.590115] env[68244]: DEBUG nova.compute.manager [req-ec5a4a6b-88c4-4694-b550-940bdd5dad9f req-7ad02380-a205-4e63-a666-73d698ffffd8 service nova] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Detach interface failed, port_id=53c89888-9d51-40ce-af06-fc64566eb47d, reason: Instance 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1437.676715] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.676985] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.677191] env[68244]: INFO nova.compute.manager [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Shelving [ 1437.837750] env[68244]: INFO nova.compute.manager [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Took 12.28 seconds to build instance. [ 1437.923128] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "8aafc14e-418a-4c43-80b9-54da13550c32" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.923284] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.923500] env[68244]: DEBUG nova.compute.manager [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.924444] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9abe23-3025-4c99-9546-12b468942713 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.932103] env[68244]: DEBUG nova.compute.manager [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1437.932450] env[68244]: DEBUG nova.objects.instance 
[None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'flavor' on Instance uuid 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1438.035123] env[68244]: INFO nova.compute.manager [-] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Took 1.25 seconds to deallocate network for instance. [ 1438.077919] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.339559] env[68244]: DEBUG oslo_concurrency.lockutils [None req-37bb4192-5aae-40e1-bf79-7f386bfd9645 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.790s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.542213] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.542468] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.542686] env[68244]: DEBUG nova.objects.instance [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'resources' on Instance uuid 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1438.580561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.685908] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1438.686239] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5310ea7f-134c-4ce1-9934-433bb34cc7b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.694912] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1438.694912] env[68244]: value = "task-2781549" [ 1438.694912] env[68244]: _type = "Task" [ 1438.694912] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.703981] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.882312] env[68244]: DEBUG nova.compute.manager [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Received event network-changed-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1438.882312] env[68244]: DEBUG nova.compute.manager [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Refreshing instance network info cache due to event network-changed-b82deb5a-5d45-48f8-977f-9e51a4ee39c9. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1438.882312] env[68244]: DEBUG oslo_concurrency.lockutils [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] Acquiring lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.882312] env[68244]: DEBUG oslo_concurrency.lockutils [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] Acquired lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1438.882530] env[68244]: DEBUG nova.network.neutron [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Refreshing network info cache for port b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1438.940431] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1438.940974] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09d4769d-2e08-48c9-aee3-b8aac8e8c6d8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.950429] env[68244]: DEBUG oslo_vmware.api [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1438.950429] env[68244]: value = "task-2781550" [ 1438.950429] env[68244]: _type = "Task" [ 1438.950429] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.961762] env[68244]: DEBUG oslo_vmware.api [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781550, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.127740] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27551f98-deb0-4ced-8c9c-19c979278a3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.136762] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40678468-5a34-4c62-8b19-55ffcd0ab591 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.173859] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0507d0-ffca-4d61-bbe9-17e867b431b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.182383] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7217112d-553d-40ed-9577-33992d126e8a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.196219] env[68244]: DEBUG nova.compute.provider_tree [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.206149] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781549, 'name': PowerOffVM_Task, 'duration_secs': 0.319578} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.207289] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1439.208489] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8a2d8f-e3de-45aa-8c0f-da4f86cd31bc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.232406] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02212699-074e-460d-a27b-5c48a4cb8621 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.462176] env[68244]: DEBUG oslo_vmware.api [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781550, 'name': PowerOffVM_Task, 'duration_secs': 0.252721} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.462455] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1439.462652] env[68244]: DEBUG nova.compute.manager [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.463462] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de5b701-fe3e-4803-a9b2-cd8275ab53fd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.608724] env[68244]: DEBUG nova.network.neutron [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updated VIF entry in instance network info cache for port b82deb5a-5d45-48f8-977f-9e51a4ee39c9. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.609110] env[68244]: DEBUG nova.network.neutron [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating instance_info_cache with network_info: [{"id": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "address": "fa:16:3e:9a:60:e8", "network": {"id": "952ccf2f-4044-4955-bad2-b56c7be041f4", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1244966615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "711ad4e6a1fb410299cce4337f11c050", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb82deb5a-5d", "ovs_interfaceid": "b82deb5a-5d45-48f8-977f-9e51a4ee39c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.701964] env[68244]: DEBUG nova.scheduler.client.report [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1439.743193] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Creating Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1439.743548] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e438454b-fe77-4f3c-afc4-8debb4320ae9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.753519] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1439.753519] env[68244]: value = "task-2781551" [ 1439.753519] env[68244]: _type = "Task" [ 1439.753519] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.762091] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781551, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.977947] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7b32bbba-af7b-42f6-ac6f-3ec7434664f5 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.111945] env[68244]: DEBUG oslo_concurrency.lockutils [req-8be2015f-e8a1-4348-9eca-8de11ad43f31 req-d86dafe1-de4e-4048-9c0f-c2bcd4ca1a30 service nova] Releasing lock "refresh_cache-ff690eea-6e5a-42a3-bf85-1b844425df2a" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.207536] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.210072] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.630s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.210327] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.210403] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1440.211316] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c96eeb-eefb-4acf-aa0d-3f915564a51d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.220201] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a9c25a-ed55-4595-b474-d22bcf5d69a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.238837] env[68244]: INFO nova.scheduler.client.report [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocations for instance 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715 [ 1440.241074] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a1262-a176-4747-9e30-881f80b26bc1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.252861] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a346b165-802a-4319-b1a8-676f37363348 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.265123] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781551, 'name': CreateSnapshot_Task, 'duration_secs': 0.418312} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.288937] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Created Snapshot of the VM instance {{(pid=68244) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1440.289314] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180105MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1440.289448] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.289661] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.292749] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2159aae-2727-48bc-b59c-52d4b423653c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.359633] env[68244]: DEBUG nova.objects.instance [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'flavor' on Instance uuid 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.751202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-46ab2486-9858-4103-82c7-332407206019 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.590s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.811789] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Creating linked-clone VM from snapshot {{(pid=68244) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1440.812325] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-95d23493-38fb-4ea5-97c5-9178e1a8c98c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.822486] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1440.822486] env[68244]: value = 
"task-2781552" [ 1440.822486] env[68244]: _type = "Task" [ 1440.822486] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.830776] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781552, 'name': CloneVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.864964] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.865188] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.865408] env[68244]: DEBUG nova.network.neutron [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.865626] env[68244]: DEBUG nova.objects.instance [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'info_cache' on Instance uuid 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1441.318582] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 75bec02f-82f7-4e8d-81da-3c511588be29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1441.318946] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 8aafc14e-418a-4c43-80b9-54da13550c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1441.319230] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance d082450a-b0ab-4a54-bc0c-55541cb71fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1441.319360] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance ff690eea-6e5a-42a3-bf85-1b844425df2a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1441.319614] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1441.319823] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1441.335929] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781552, 'name': CloneVM_Task} progress is 94%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.369511] env[68244]: DEBUG nova.objects.base [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Object Instance<8aafc14e-418a-4c43-80b9-54da13550c32> lazy-loaded attributes: flavor,info_cache {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1441.389572] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65f87ba-b3f6-4922-a38b-2838c1193765 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.397909] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19d8c8a-4dbc-4ee4-a146-988790808da9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.428620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e9b611-82ff-4a81-8213-2d453b7de30e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.436406] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c975db24-58c7-4a6f-879d-2baa74f29c9d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.450095] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.833415] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781552, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.955388] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1442.096927] env[68244]: DEBUG nova.network.neutron [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.334270] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781552, 'name': CloneVM_Task, 'duration_secs': 1.220809} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.334635] env[68244]: INFO nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Created linked-clone VM from snapshot [ 1442.335336] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77e6f31-4ae7-476a-a203-5087b03b9343 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.343145] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Uploading image 249ce37e-7744-4a1e-bb26-642b5f77184d {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1442.369119] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1442.369119] env[68244]: value = "vm-559207" [ 1442.369119] env[68244]: _type = "VirtualMachine" [ 1442.369119] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1442.370140] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b16c00da-ebaf-4693-befb-61d46b2f028d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.376399] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lease: (returnval){ [ 1442.376399] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a4594-d172-ec02-e9b4-b279e1bc7049" [ 1442.376399] env[68244]: _type = "HttpNfcLease" [ 1442.376399] env[68244]: } obtained for exporting VM: (result){ [ 1442.376399] env[68244]: value = "vm-559207" [ 1442.376399] env[68244]: _type = "VirtualMachine" [ 1442.376399] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1442.376913] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the lease: (returnval){ [ 1442.376913] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a4594-d172-ec02-e9b4-b279e1bc7049" [ 1442.376913] env[68244]: _type = "HttpNfcLease" [ 1442.376913] env[68244]: } to be ready. {{(pid=68244) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1442.384253] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1442.384253] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a4594-d172-ec02-e9b4-b279e1bc7049" [ 1442.384253] env[68244]: _type = "HttpNfcLease" [ 1442.384253] env[68244]: } is initializing. 
{{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1442.460428] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1442.460594] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.171s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.599182] env[68244]: DEBUG oslo_concurrency.lockutils [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1442.885290] env[68244]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1442.885290] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a4594-d172-ec02-e9b4-b279e1bc7049" [ 1442.885290] env[68244]: _type = "HttpNfcLease" [ 1442.885290] env[68244]: } is ready. {{(pid=68244) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1442.885580] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1442.885580] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a4594-d172-ec02-e9b4-b279e1bc7049" [ 1442.885580] env[68244]: _type = "HttpNfcLease" [ 1442.885580] env[68244]: }. {{(pid=68244) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1442.886313] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb9a9a7-871d-4a4f-9d2a-16717eb0aedb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.893457] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk from lease info. {{(pid=68244) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1442.893632] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk for reading. 
{{(pid=68244) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1442.977925] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b15fc921-8cfa-42e3-8c12-eaff323a8c22 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.605293] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.605729] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0831b7a-8ae2-49f8-8fa9-f46cd32a1b9e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.614395] env[68244]: DEBUG oslo_vmware.api [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1443.614395] env[68244]: value = "task-2781555" [ 1443.614395] env[68244]: _type = "Task" [ 1443.614395] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.623173] env[68244]: DEBUG oslo_vmware.api [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.125768] env[68244]: DEBUG oslo_vmware.api [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781555, 'name': PowerOnVM_Task, 'duration_secs': 0.424734} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.126033] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.126226] env[68244]: DEBUG nova.compute.manager [None req-0ca7ffb6-77d5-4666-85fe-415be890cbd8 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.127061] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c85651-0377-4679-aac8-5f8fb74934ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.461057] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.461332] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.461574] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.462412] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.462412] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.462412] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1444.877584] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "75bec02f-82f7-4e8d-81da-3c511588be29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.877966] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.878196] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.878414] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.878594] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.880885] env[68244]: INFO nova.compute.manager [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Terminating instance [ 1445.385028] env[68244]: DEBUG nova.compute.manager [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1445.385286] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1445.386187] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b521532-f816-4d1b-b65f-e788deaff729 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.395434] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1445.395701] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d5f33c6-df4d-4b41-9db7-c0fa1b0794cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.406121] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1445.406121] env[68244]: value = "task-2781556" [ 1445.406121] env[68244]: _type = "Task" [ 1445.406121] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.407331] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2711116d-3d69-4e11-aa46-2bd57418091b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.417941] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Suspending the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1445.421056] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-72a51f7a-9715-4bca-a246-4cefca86db2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.423183] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.429853] env[68244]: DEBUG oslo_vmware.api [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1445.429853] env[68244]: value = "task-2781557" [ 1445.429853] env[68244]: _type = "Task" [ 1445.429853] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.440467] env[68244]: DEBUG oslo_vmware.api [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781557, 'name': SuspendVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.917787] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781556, 'name': PowerOffVM_Task, 'duration_secs': 0.288414} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.918242] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.918242] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1445.918439] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f1cb06e-76bd-44ae-ad96-e975c8dfa93f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.940225] env[68244]: DEBUG oslo_vmware.api [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781557, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.991793] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1445.992095] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1445.992326] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleting the datastore file [datastore2] 75bec02f-82f7-4e8d-81da-3c511588be29 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1445.992610] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbf83e15-feb3-47fe-9bc7-6cac8d6d0d57 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.001392] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for the task: (returnval){ [ 1446.001392] env[68244]: value = "task-2781559" [ 1446.001392] env[68244]: _type = "Task" [ 1446.001392] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.010665] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.440550] env[68244]: DEBUG oslo_vmware.api [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781557, 'name': SuspendVM_Task, 'duration_secs': 0.686831} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.440833] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Suspended the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1446.441037] env[68244]: DEBUG nova.compute.manager [None req-ce5cca53-8c62-436b-9c01-07b655498377 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1446.441878] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179c575c-e9b4-4a92-8dc4-b76778844981 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.512043] env[68244]: DEBUG oslo_vmware.api [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Task: {'id': task-2781559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237851} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.512329] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1446.512485] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1446.512673] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1446.512851] env[68244]: INFO nova.compute.manager [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1446.513210] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1446.513432] env[68244]: DEBUG nova.compute.manager [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1446.513536] env[68244]: DEBUG nova.network.neutron [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1446.780736] env[68244]: DEBUG nova.compute.manager [req-ed46157b-49e1-4a73-9115-c397304e7def req-3ede947e-8e9e-40eb-8753-527389333e9c service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Received event network-vif-deleted-3cbfb410-db85-46ec-ad9d-96a42b67105e {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1446.780952] env[68244]: INFO nova.compute.manager [req-ed46157b-49e1-4a73-9115-c397304e7def req-3ede947e-8e9e-40eb-8753-527389333e9c service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Neutron deleted interface 3cbfb410-db85-46ec-ad9d-96a42b67105e; detaching it from the instance and deleting it from the info cache [ 1446.781147] env[68244]: DEBUG nova.network.neutron [req-ed46157b-49e1-4a73-9115-c397304e7def req-3ede947e-8e9e-40eb-8753-527389333e9c service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.264914] env[68244]: DEBUG nova.network.neutron [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.283226] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd07b312-dfff-4129-aa67-dd8f9dfdf1c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.294438] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87104da6-91a8-4ef2-a548-e0aa4d45399c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.324063] env[68244]: DEBUG nova.compute.manager [req-ed46157b-49e1-4a73-9115-c397304e7def req-3ede947e-8e9e-40eb-8753-527389333e9c service nova] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Detach interface failed, port_id=3cbfb410-db85-46ec-ad9d-96a42b67105e, reason: Instance 75bec02f-82f7-4e8d-81da-3c511588be29 could not be found. 
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1447.767480] env[68244]: INFO nova.compute.manager [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Resuming [ 1447.768229] env[68244]: DEBUG nova.objects.instance [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'flavor' on Instance uuid 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1447.769752] env[68244]: INFO nova.compute.manager [-] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Took 1.26 seconds to deallocate network for instance. [ 1448.277120] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1448.277441] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1448.277594] env[68244]: DEBUG nova.objects.instance [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lazy-loading 'resources' on Instance uuid 75bec02f-82f7-4e8d-81da-3c511588be29 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.845330] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f733e283-54d1-4a4b-8594-b7b2d472271f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.854456] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88706fd-7736-4a9f-a2ce-009caa9af12b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.886831] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17178979-98fe-46d3-89e6-4a6efac45a5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.895425] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2088be-da17-4dc8-a557-57a071c58c53 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.909474] env[68244]: DEBUG nova.compute.provider_tree [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.280520] 
env[68244]: DEBUG oslo_concurrency.lockutils [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.280921] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquired lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1449.280921] env[68244]: DEBUG nova.network.neutron [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.413045] env[68244]: DEBUG nova.scheduler.client.report [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1449.697986] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1449.698922] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab40690-5ec4-4c92-abac-e4be68a8cdad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.705648] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk is in state: ready. {{(pid=68244) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1449.705812] env[68244]: ERROR oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk due to incomplete transfer. 
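The entries above follow the long-poll pattern used for every vCenter operation in this log: the SOAP call returns a task handle (task-2781557, task-2781559, ...), the caller then waits on it while progress is logged (0%, 62%, completed successfully), and side channels such as the NFC lease for disk-0.vmdk are explicitly aborted when the transfer did not finish. A minimal sketch of that polling loop, assuming a hypothetical get_task_info() callable in place of the real vSphere API (an illustration of the pattern, not oslo.vmware code):

    import time

    class TaskFailed(Exception):
        """Raised when the remote task reports an error state."""

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        # get_task_info(task_id) is assumed to return a dict such as
        # {'state': 'running', 'progress': 62} -- a stand-in for the task
        # info objects being polled in the log above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # Mirrors the "Task: {...} progress is NN%" lines between polls.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
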
[ 1449.706077] env[68244]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8298a11d-d37b-4b2f-bda9-0ef035cbc0f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.713902] env[68244]: DEBUG oslo_vmware.rw_handles [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524544e7-54a9-7d08-68b3-09303abead7e/disk-0.vmdk. {{(pid=68244) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1449.714110] env[68244]: DEBUG nova.virt.vmwareapi.images [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Uploaded image 249ce37e-7744-4a1e-bb26-642b5f77184d to the Glance image server {{(pid=68244) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1449.716447] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Destroying the VM {{(pid=68244) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1449.716680] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7c9ba058-b63b-4b4f-9c8a-4e92887bc0ab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.722841] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1449.722841] env[68244]: value = "task-2781560" [ 1449.722841] env[68244]: _type = "Task" [ 1449.722841] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.731057] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781560, 'name': Destroy_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.918264] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.935702] env[68244]: INFO nova.scheduler.client.report [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Deleted allocations for instance 75bec02f-82f7-4e8d-81da-3c511588be29 [ 1449.988544] env[68244]: DEBUG nova.network.neutron [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [{"id": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "address": "fa:16:3e:88:de:fe", "network": {"id": "5247141f-2f04-43cf-8f06-2ddd2b7c6aa5", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1608735266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a16375181ca41fead00ee23bd2a9af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92cdccfd-4b10-4024-b724-5f22792dd4de", "external-id": "nsx-vlan-transportzone-902", "segmentation_id": 902, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap843d0ee5-6e", "ovs_interfaceid": "843d0ee5-6e7f-4508-b91e-c871fd48ee83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.233231] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781560, 'name': Destroy_Task, 'duration_secs': 0.35466} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.233490] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Destroyed the VM [ 1450.233677] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Deleting Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1450.233934] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c7c8ac38-655a-4d91-b102-708c454e5102 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.240636] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1450.240636] env[68244]: value = "task-2781561" [ 1450.240636] env[68244]: _type = "Task" [ 1450.240636] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.248325] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781561, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.444180] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ef788f70-c7ee-4823-98f4-96a9a10f0d77 tempest-ServerActionsTestOtherA-258842533 tempest-ServerActionsTestOtherA-258842533-project-member] Lock "75bec02f-82f7-4e8d-81da-3c511588be29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.566s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1450.490958] env[68244]: DEBUG oslo_concurrency.lockutils [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Releasing lock "refresh_cache-8aafc14e-418a-4c43-80b9-54da13550c32" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1450.492075] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42a683b-0ad5-4da7-ac78-3a7e89260b38 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.499888] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Resuming the VM {{(pid=68244) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1450.500153] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ba448e7-aa82-4f71-aeff-174851b135f9 {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.506976] env[68244]: DEBUG oslo_vmware.api [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1450.506976] env[68244]: value = "task-2781562" [ 1450.506976] env[68244]: _type = "Task" [ 1450.506976] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.516222] env[68244]: DEBUG oslo_vmware.api [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781562, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.750800] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781561, 'name': RemoveSnapshot_Task, 'duration_secs': 0.344552} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.751092] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Deleted Snapshot of the VM instance {{(pid=68244) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1450.751364] env[68244]: DEBUG nova.compute.manager [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1450.752122] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b793a8b1-0892-4313-b480-08858068bda1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.017536] env[68244]: DEBUG oslo_vmware.api [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781562, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.074102] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1451.264631] env[68244]: INFO nova.compute.manager [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Shelve offloading [ 1451.518866] env[68244]: DEBUG oslo_vmware.api [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781562, 'name': PowerOnVM_Task, 'duration_secs': 0.583231} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.519271] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Resumed the VM {{(pid=68244) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1451.519314] env[68244]: DEBUG nova.compute.manager [None req-72219192-31a8-4b1d-9051-833c56c28239 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1451.520084] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b6ab53-ac25-41eb-8a51-2bd0244299cf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.769132] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1451.769445] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae4592e8-49f9-4ee5-9077-30d553ef5c05 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.781054] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1451.781054] env[68244]: value = "task-2781563" [ 1451.781054] env[68244]: _type = "Task" [ 1451.781054] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.788424] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781563, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.290898] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1452.291878] env[68244]: DEBUG nova.compute.manager [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1452.292997] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d389999-bd63-4f87-a07e-6a47a8dbd307 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.298785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.298828] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1452.299027] env[68244]: DEBUG nova.network.neutron [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1452.520381] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "8aafc14e-418a-4c43-80b9-54da13550c32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.520679] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.520850] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.521081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.521261] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.523806] env[68244]: INFO nova.compute.manager [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Terminating instance [ 1453.016314] env[68244]: DEBUG nova.network.neutron [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updating instance_info_cache with network_info: [{"id": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "address": "fa:16:3e:f7:4c:df", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e8c373d-1e", "ovs_interfaceid": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.027360] env[68244]: DEBUG nova.compute.manager [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1453.027579] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1453.028713] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10259c41-ed29-43f0-b136-97d5cbff7bf0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.037699] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1453.037914] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1eba11af-d265-4ec6-9a18-39a203e8ab54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.045420] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1453.045420] env[68244]: value = "task-2781564" [ 1453.045420] env[68244]: _type = "Task" [ 1453.045420] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.055899] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.519497] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1453.556053] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781564, 'name': PowerOffVM_Task, 'duration_secs': 0.201592} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.556466] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1453.556582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1453.556782] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ad8feba-9b19-4f08-a4d0-80d3ac76a9d1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.623792] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1453.623936] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1453.624159] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleting the datastore file [datastore2] 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1453.624424] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6f529af-21d5-454c-a9fd-f75cb2dfb302 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.631953] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for the task: (returnval){ [ 1453.631953] env[68244]: value = "task-2781566" [ 1453.631953] env[68244]: _type = "Task" [ 1453.631953] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.641314] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781566, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.770070] env[68244]: DEBUG nova.compute.manager [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received event network-vif-unplugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1453.770309] env[68244]: DEBUG oslo_concurrency.lockutils [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1453.770519] env[68244]: DEBUG oslo_concurrency.lockutils [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1453.770690] env[68244]: DEBUG oslo_concurrency.lockutils [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1453.770861] env[68244]: DEBUG nova.compute.manager [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] No waiting events found dispatching network-vif-unplugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1453.771061] env[68244]: WARNING nova.compute.manager [req-7c15e3db-d915-4cf6-af0b-4e43b8b53321 req-c9b1a115-3bec-4726-ac38-779b2b1a7872 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received unexpected event network-vif-unplugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a for instance with vm_state shelved and task_state shelving_offloading. 
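The block above also records the external-event plumbing: the service thread takes the per-instance "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" lock, looks for a waiter registered for network-vif-unplugged-3e8c373d-1ea2-4964-876e-fe7a4a60a83a, finds none, and logs the event as unexpected because the instance is already shelved and shelving_offloading. A schematic of that bookkeeping, using plain threading primitives rather than nova's actual InstanceEvents implementation:

    import threading

    class InstanceEventWaiters:
        """Schematic per-instance event registry (not nova's InstanceEvents)."""

        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering the external action."""
            with self._lock:
                waiter = threading.Event()
                self._waiters[(instance_uuid, event_name)] = waiter
                return waiter

        def dispatch(self, instance_uuid, event_name):
            """Wake a registered waiter, or report the event as unexpected."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print(f"No waiting events found dispatching {event_name}")
                return False
            waiter.set()
            return True
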
[ 1453.791290] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1453.792244] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88970e7b-ad1c-462a-a275-6bc082a2002e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.800438] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1453.800686] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef7853e2-0904-4af2-b782-e8a35084ae80 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.869153] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1453.869367] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1453.869555] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] d082450a-b0ab-4a54-bc0c-55541cb71fae {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1453.869822] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c838704c-036e-498f-92a4-d378a6d5ed5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.876072] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1453.876072] env[68244]: value = "task-2781568" [ 1453.876072] env[68244]: _type = "Task" [ 1453.876072] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.884928] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781568, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.070020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.070257] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1454.142080] env[68244]: DEBUG oslo_vmware.api [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Task: {'id': task-2781566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144624} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.142336] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1454.142517] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1454.142694] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1454.142868] env[68244]: INFO nova.compute.manager [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1454.143115] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1454.143335] env[68244]: DEBUG nova.compute.manager [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1454.143462] env[68244]: DEBUG nova.network.neutron [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1454.386165] env[68244]: DEBUG oslo_vmware.api [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122946} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.386436] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1454.386615] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1454.386802] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1454.409687] env[68244]: INFO nova.scheduler.client.report [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance d082450a-b0ab-4a54-bc0c-55541cb71fae [ 1454.572865] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1454.914197] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.914507] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1454.914884] env[68244]: DEBUG nova.objects.instance [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'resources' on Instance uuid d082450a-b0ab-4a54-bc0c-55541cb71fae {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.067927] env[68244]: DEBUG nova.network.neutron [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.091993] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1455.417764] env[68244]: DEBUG nova.objects.instance [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'numa_topology' on Instance uuid d082450a-b0ab-4a54-bc0c-55541cb71fae {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.570677] env[68244]: INFO nova.compute.manager [-] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Took 1.43 seconds to deallocate network for instance. 
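Between the per-instance work, the resource tracker serializes its bookkeeping on the "compute_resources" lock (update_usage for the deleted instances, instance_claim for 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1), and the report client keeps confirming an unchanged inventory for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3. Reading that payload the way placement does, usable capacity is (total - reserved) * allocation_ratio per resource class, with max_unit only capping a single allocation; a quick check of the numbers reported here (an illustrative calculation, not scheduler code):

    # Inventory as logged for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3
    # (min_unit/step_size/max_unit omitted; max_unit merely caps one allocation).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} allocatable")

    # VCPU: 192 allocatable
    # MEMORY_MB: 196078 allocatable
    # DISK_GB: 400 allocatable
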
[ 1455.749149] env[68244]: DEBUG oslo_concurrency.lockutils [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1455.797016] env[68244]: DEBUG nova.compute.manager [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Received event network-changed-3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1455.797226] env[68244]: DEBUG nova.compute.manager [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Refreshing instance network info cache due to event network-changed-3e8c373d-1ea2-4964-876e-fe7a4a60a83a. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1455.797442] env[68244]: DEBUG oslo_concurrency.lockutils [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] Acquiring lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.797587] env[68244]: DEBUG oslo_concurrency.lockutils [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] Acquired lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1455.797747] env[68244]: DEBUG nova.network.neutron [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Refreshing network info cache for port 3e8c373d-1ea2-4964-876e-fe7a4a60a83a {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1455.920840] env[68244]: DEBUG nova.objects.base [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1455.970730] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e53eda0-e320-42ad-b727-b1ed0883bd5e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.978385] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0e133d-6c7b-4ef1-8986-6b0d6564e4e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.009830] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a742b1f3-a352-4d31-a0a3-136ce799b750 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.016483] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514bfbe3-b760-4791-9d42-7a76b00546da {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.029072] env[68244]: DEBUG nova.compute.provider_tree [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1456.076920] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.501763] env[68244]: DEBUG nova.network.neutron [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updated VIF entry in instance network info cache for port 3e8c373d-1ea2-4964-876e-fe7a4a60a83a. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1456.502375] env[68244]: DEBUG nova.network.neutron [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updating instance_info_cache with network_info: [{"id": "3e8c373d-1ea2-4964-876e-fe7a4a60a83a", "address": "fa:16:3e:f7:4c:df", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": null, "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap3e8c373d-1e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.532459] env[68244]: DEBUG nova.scheduler.client.report [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1457.005948] env[68244]: DEBUG oslo_concurrency.lockutils [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] Releasing lock "refresh_cache-d082450a-b0ab-4a54-bc0c-55541cb71fae" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1457.006340] env[68244]: DEBUG nova.compute.manager [req-b52ee93d-95ad-42fd-99e2-b83eb34a618e req-46e7d144-4fe0-4864-a6d0-0287c5a6ca41 service nova] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Received event network-vif-deleted-843d0ee5-6e7f-4508-b91e-c871fd48ee83 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1457.037361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.123s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1457.039845] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.948s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1457.041269] env[68244]: INFO nova.compute.claims [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.551099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-6eb97807-d386-47df-b664-3c9b0b9215a2 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.874s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1457.551996] env[68244]: DEBUG oslo_concurrency.lockutils [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.803s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1457.552232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1457.552564] env[68244]: DEBUG oslo_concurrency.lockutils [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1457.552759] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1457.554413] env[68244]: INFO nova.compute.manager [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Terminating instance [ 1458.058942] env[68244]: DEBUG nova.compute.manager [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1458.059269] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.059462] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ba8d474-868c-4edd-84fb-f6a6b1e5519c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.071521] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6fb0ca-d939-4f9d-91e9-d1762b1681d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.097749] env[68244]: WARNING nova.virt.vmwareapi.vmops [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d082450a-b0ab-4a54-bc0c-55541cb71fae could not be found. [ 1458.097940] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1458.098138] env[68244]: INFO nova.compute.manager [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1458.098394] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1458.098764] env[68244]: DEBUG nova.compute.manager [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1458.098764] env[68244]: DEBUG nova.network.neutron [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1458.111562] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f9cc69-e364-41ee-866e-8758d9974dde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.118605] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68984ae1-04f6-418c-8f60-e11d3f78fba7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.151198] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ba974d-3c67-48d7-afb9-3a4a7b8ed44c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.158496] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a63f1d-cdd7-4a1d-b582-70191566394f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.171365] env[68244]: DEBUG nova.compute.provider_tree [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.674264] env[68244]: DEBUG nova.scheduler.client.report [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1458.805864] env[68244]: DEBUG nova.network.neutron [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.179315] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.139s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1459.179830] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e 
tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1459.182761] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.106s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1459.182974] env[68244]: DEBUG nova.objects.instance [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lazy-loading 'resources' on Instance uuid 8aafc14e-418a-4c43-80b9-54da13550c32 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.308471] env[68244]: INFO nova.compute.manager [-] [instance: d082450a-b0ab-4a54-bc0c-55541cb71fae] Took 1.21 seconds to deallocate network for instance. [ 1459.685894] env[68244]: DEBUG nova.compute.utils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1459.690456] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1459.735590] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588f93f4-aee4-42d8-9ca5-fecd5cb90995 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.743302] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610cd4bb-bc30-413b-ac75-af0b9941f736 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.773549] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cda16e-ac9f-44bb-a812-7696e712ec66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.780270] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbb10c6-c6f2-4d55-b1cd-493a7b31b24b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.792848] env[68244]: DEBUG nova.compute.provider_tree [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.191224] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1460.295671] env[68244]: DEBUG nova.scheduler.client.report [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1460.333126] env[68244]: DEBUG oslo_concurrency.lockutils [None req-68e6019f-a0a4-4ef5-906f-98f07512ae2b tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "d082450a-b0ab-4a54-bc0c-55541cb71fae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.781s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.800415] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.820834] env[68244]: INFO nova.scheduler.client.report [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Deleted allocations for instance 8aafc14e-418a-4c43-80b9-54da13550c32 [ 1461.203659] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1461.224107] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1461.224371] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1461.224529] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1461.224716] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1461.224913] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1461.225127] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1461.225294] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1461.225451] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1461.225615] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e 
tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1461.225780] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1461.225950] env[68244]: DEBUG nova.virt.hardware [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1461.226882] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dabf8f-c26e-4cde-833e-68aaaf45c06a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.236158] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a212671-c922-462c-91f8-919a7f76d80d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.249148] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1461.254656] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Creating folder: Project (9d762d264385422ebb970d01f98ab2c8). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1461.254903] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca938c97-f43f-41e3-97d0-985885b2422a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.265548] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Created folder: Project (9d762d264385422ebb970d01f98ab2c8) in parent group-v558876. [ 1461.265756] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Creating folder: Instances. Parent ref: group-v559208. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1461.265990] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2097793f-4c5c-416c-99f4-342a99d36a48 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.274738] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Created folder: Instances in parent group-v559208. 
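From here on the spawn of instance 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1 is a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) that all share the same logging shape: a method is invoked through oslo_vmware.service, the API hands back a task reference, and wait_for_task polls it, printing "progress is N%" until the task reports a duration_secs and "completed successfully". The sketch below shows that poll loop in generic form only; get_task_state() and the dict it returns are placeholders standing in for the vSphere task-info lookup, not the actual oslo.vmware API.

    import time

    POLL_INTERVAL = 0.5   # seconds between polls; the real driver drives this from a looping call

    def get_task_state(task_ref):
        # Placeholder for the vSphere "task info" lookup; assumed to return a dict like
        #   {'state': 'running', 'progress': 40}
        #   {'state': 'success', 'duration_secs': 0.238282}
        #   {'state': 'error', 'message': '...'}
        raise NotImplementedError

    def wait_for_task(task_ref):
        """Poll a task reference until it succeeds or fails, in the style of the
        CreateVM_Task / CopyVirtualDisk_Task entries in this log."""
        start = time.monotonic()
        while True:
            info = get_task_state(task_ref)
            if info['state'] == 'success':
                info.setdefault('duration_secs', time.monotonic() - start)
                print(f"Task: {task_ref} completed successfully.")
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_ref} failed: {info.get('message')}")
            print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
            time.sleep(POLL_INTERVAL)

The sequence visible between here and the PowerOnVM_Task entry is this loop applied step by step: create the VM shell in the freshly created Instances folder, locate the cached image vmdk under [datastore2] devstack-image-cache_base, copy it to the instance directory, extend the root disk to 1048576 (the flavor's root_gb=1 expressed in KB), reconfigure the VM to attach the disk, rename the VM, and finally power it on.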
[ 1461.274954] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1461.275150] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1461.275337] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29b3b40b-af19-4f6a-81a4-c45feae456f1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.291009] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1461.291009] env[68244]: value = "task-2781571" [ 1461.291009] env[68244]: _type = "Task" [ 1461.291009] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.298654] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781571, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.328581] env[68244]: DEBUG oslo_concurrency.lockutils [None req-2ff22aa8-c3b8-48d8-ae78-c6bdf402f869 tempest-ServerActionsTestJSON-662805750 tempest-ServerActionsTestJSON-662805750-project-member] Lock "8aafc14e-418a-4c43-80b9-54da13550c32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.808s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1461.633016] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "2d539645-1fd5-4c8d-813b-129677ebb11c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.633194] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.800028] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781571, 'name': CreateVM_Task, 'duration_secs': 0.238282} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.800257] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.800640] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.800802] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.801142] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1461.801399] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aafe386-ba71-4a66-81d1-9409e1df34c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.805506] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1461.805506] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52132361-51c4-a674-c45b-34a54290d303" [ 1461.805506] env[68244]: _type = "Task" [ 1461.805506] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.812588] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52132361-51c4-a674-c45b-34a54290d303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.136013] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1462.316486] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52132361-51c4-a674-c45b-34a54290d303, 'name': SearchDatastore_Task, 'duration_secs': 0.008956} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.316854] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.317024] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.317266] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.317417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1462.317594] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.317852] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b99bd23a-d392-4c10-b718-f56d37005f59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.325963] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.326162] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.326942] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a01f3d7-85f4-48e8-87e9-61c978990a64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.332807] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1462.332807] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d0a332-78ac-8139-7b62-9fa6904ce9c5" [ 1462.332807] env[68244]: _type = "Task" [ 1462.332807] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.340814] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d0a332-78ac-8139-7b62-9fa6904ce9c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.657076] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1462.657361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1462.658829] env[68244]: INFO nova.compute.claims [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1462.845770] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d0a332-78ac-8139-7b62-9fa6904ce9c5, 'name': SearchDatastore_Task, 'duration_secs': 0.007852} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.846847] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14ef64f3-a2f1-469a-8599-af8cf96296f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.851320] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1462.851320] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d79e01-3eda-4050-4747-647129ae369e" [ 1462.851320] env[68244]: _type = "Task" [ 1462.851320] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.858910] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d79e01-3eda-4050-4747-647129ae369e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.366705] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52d79e01-3eda-4050-4747-647129ae369e, 'name': SearchDatastore_Task, 'duration_secs': 0.009358} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.367123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.367744] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.368406] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a35afcb5-e1cf-41a1-a00f-1d1c11c93638 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.376421] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1463.376421] env[68244]: value = "task-2781572" [ 1463.376421] env[68244]: _type = "Task" [ 1463.376421] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.384677] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.728543] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a530bfc9-06b7-4447-afa8-664fae7665b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.736769] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bda83ca-053e-4a64-995c-984075006150 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.767816] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a680d599-a380-4ca5-b851-af3a1e7c6d4f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.775895] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eca57aa-5b64-4fb2-9b73-b5d74272cf1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.790054] env[68244]: DEBUG nova.compute.provider_tree [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1463.887522] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431067} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.887781] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1463.887996] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1463.888265] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e71ef895-cf6d-4f09-b826-465beb5988e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.894227] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1463.894227] env[68244]: value = "task-2781573" [ 1463.894227] env[68244]: _type = "Task" [ 1463.894227] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.902635] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.293827] env[68244]: DEBUG nova.scheduler.client.report [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1464.404640] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060992} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.405115] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.405641] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7843747-0b64-4e2c-bb67-326d6855b6bb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.425522] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.425767] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f803095-bebf-4d59-8643-150518ab91eb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.444479] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1464.444479] env[68244]: value = "task-2781574" [ 1464.444479] env[68244]: _type = "Task" [ 1464.444479] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.451784] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781574, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.787319] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "92b166d2-bc8b-44ba-adf0-3286285c7611" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1464.787547] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1464.798499] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1464.798940] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1464.955071] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781574, 'name': ReconfigVM_Task, 'duration_secs': 0.283024} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.955310] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1464.955928] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67a8e326-f825-46f0-a805-ab9f41e155fb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.962670] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1464.962670] env[68244]: value = "task-2781575" [ 1464.962670] env[68244]: _type = "Task" [ 1464.962670] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.970146] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781575, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.290296] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1465.303463] env[68244]: DEBUG nova.compute.utils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1465.304983] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1465.305168] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1465.350693] env[68244]: DEBUG nova.policy [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1465.473612] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781575, 'name': Rename_Task, 'duration_secs': 0.146255} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.473612] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1465.473612] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e67d3acd-1525-4d95-930f-909155ed0f99 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.480370] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1465.480370] env[68244]: value = "task-2781576" [ 1465.480370] env[68244]: _type = "Task" [ 1465.480370] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.488087] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.614292] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Successfully created port: 688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1465.808044] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Start building block device mappings for instance. 
{{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1465.814464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.814464] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.814464] env[68244]: INFO nova.compute.claims [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1465.991207] env[68244]: DEBUG oslo_vmware.api [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781576, 'name': PowerOnVM_Task, 'duration_secs': 0.404674} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.991431] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1465.991626] env[68244]: INFO nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Took 4.79 seconds to spawn the instance on the hypervisor. [ 1465.991848] env[68244]: DEBUG nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1465.992640] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2436e44-500a-4bfa-ba31-9615835aeb20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.508357] env[68244]: INFO nova.compute.manager [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Took 11.43 seconds to build instance. [ 1466.825301] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1466.858811] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1466.859076] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1466.859238] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1466.859420] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1466.859563] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1466.859705] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1466.859915] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1466.860086] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1466.860254] env[68244]: DEBUG nova.virt.hardware [None 
req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1466.860412] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1466.860583] env[68244]: DEBUG nova.virt.hardware [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1466.861486] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873c1b14-8823-485e-8c67-890809c1fdb1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.872847] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d1eda4-1833-489a-851d-4fef91baa698 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.902689] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b345a3-7a1f-40d3-a550-a48add956587 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.910360] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e908b541-3624-493b-ba1b-d110eee29407 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.941474] env[68244]: INFO nova.compute.manager [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Rebuilding instance [ 1466.944839] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dfa2f1-dd81-4024-ad0c-55b1d6457395 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.952351] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8788b29-6990-4aa3-bde9-d1a63f7117d3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.968503] env[68244]: DEBUG nova.compute.provider_tree [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1466.979345] env[68244]: DEBUG nova.compute.manager [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Received event network-vif-plugged-688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 1466.979345] env[68244]: DEBUG oslo_concurrency.lockutils [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] Acquiring lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1466.979345] env[68244]: DEBUG oslo_concurrency.lockutils [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1466.979523] env[68244]: DEBUG oslo_concurrency.lockutils [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1466.979636] env[68244]: DEBUG nova.compute.manager [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] No waiting events found dispatching network-vif-plugged-688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1466.979799] env[68244]: WARNING nova.compute.manager [req-9d0dd3fa-abb1-4174-868b-194fe9bac727 req-29a8c736-64b1-402f-8248-e74d4a1ffbcb service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Received unexpected event network-vif-plugged-688b685f-f503-4ab8-b517-385ace82b8d8 for instance with vm_state building and task_state spawning. 
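
For context on the WARNING that closes the record above: Nova coordinates externally delivered network events (such as network-vif-plugged) with an in-process registry of waiters guarded by a per-instance "-events" lock, and an event that arrives when no waiter was registered is logged as unexpected. The sketch below is a minimal, hypothetical illustration of that pop-or-warn pattern in plain Python; the names (EventRegistry, prepare, notify) are invented for illustration and are not Nova's actual API.

```python
# Illustrative sketch only: a pop-or-warn event registry, loosely modeled on the
# pattern visible in the log (take the per-instance events lock, pop a waiting
# event if one was registered, otherwise report the notification as unexpected).
import threading
from concurrent.futures import Future


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_id, event_name) -> Future

    def prepare(self, instance_id, event_name):
        """Register interest in an event before triggering the external action."""
        fut = Future()
        with self._lock:
            self._waiters[(instance_id, event_name)] = fut
        return fut

    def notify(self, instance_id, event_name):
        """Deliver an external event; warn if nobody was waiting for it."""
        with self._lock:
            fut = self._waiters.pop((instance_id, event_name), None)
        if fut is None:
            print(f"WARNING: unexpected event {event_name} for {instance_id}")
        else:
            fut.set_result(event_name)


# Usage: the build path prepares a waiter, the (simulated) network service notifies.
registry = EventRegistry()
waiter = registry.prepare("2d539645", "network-vif-plugged")
registry.notify("2d539645", "network-vif-plugged")   # completes the waiter
print(waiter.result(timeout=1))
registry.notify("2d539645", "network-vif-plugged")   # nobody waiting -> warning
```

In the log above the event arrives while the instance is still building and no waiter has been registered yet, which is why the notification is reported as unexpected rather than raised as an error.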
[ 1466.989751] env[68244]: DEBUG nova.compute.manager [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1466.990810] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb58520-1768-4606-acd7-f355fe21ae4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.010326] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7eaf5bc3-3461-4fde-b5fe-4bdf9590386e tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.940s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1467.071737] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Successfully updated port: 688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1467.475517] env[68244]: DEBUG nova.scheduler.client.report [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1467.575542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.575542] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.575542] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1467.982361] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1467.982814] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1468.004476] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1468.004788] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-becd8a98-cdb8-4972-a727-ebb9b41f727c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.012521] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1468.012521] env[68244]: value = "task-2781577" [ 1468.012521] env[68244]: _type = "Task" [ 1468.012521] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.021065] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.110582] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1468.246901] env[68244]: DEBUG nova.network.neutron [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Updating instance_info_cache with network_info: [{"id": "688b685f-f503-4ab8-b517-385ace82b8d8", "address": "fa:16:3e:0f:8a:4b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap688b685f-f5", "ovs_interfaceid": "688b685f-f503-4ab8-b517-385ace82b8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.488069] env[68244]: DEBUG nova.compute.utils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1468.489539] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Not allocating networking since 'none' was specified. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1468.522085] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781577, 'name': PowerOffVM_Task, 'duration_secs': 0.18014} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.522303] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.522528] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1468.523289] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a31527-3b58-4428-9968-d97e65777583 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.529852] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1468.530084] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3db2c0b6-779c-4d6a-9668-3599bf1908ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.558545] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1468.558764] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1468.558949] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Deleting the datastore file [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1468.559322] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a1a5742-580f-4cd9-a339-737501599c7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.566321] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1468.566321] env[68244]: value = "task-2781579" [ 1468.566321] env[68244]: _type = "Task" [ 1468.566321] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.574454] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.750118] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.750562] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Instance network_info: |[{"id": "688b685f-f503-4ab8-b517-385ace82b8d8", "address": "fa:16:3e:0f:8a:4b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap688b685f-f5", "ovs_interfaceid": "688b685f-f503-4ab8-b517-385ace82b8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1468.751126] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:8a:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '688b685f-f503-4ab8-b517-385ace82b8d8', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.759088] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1468.759306] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1468.759537] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b40b2350-ad0e-4829-a361-f8e2628110d9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.779436] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1468.779436] env[68244]: value = "task-2781580" [ 1468.779436] env[68244]: _type = "Task" [ 1468.779436] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.786290] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781580, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.991066] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1469.009944] env[68244]: DEBUG nova.compute.manager [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Received event network-changed-688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1469.010156] env[68244]: DEBUG nova.compute.manager [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Refreshing instance network info cache due to event network-changed-688b685f-f503-4ab8-b517-385ace82b8d8. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1469.010402] env[68244]: DEBUG oslo_concurrency.lockutils [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] Acquiring lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.010536] env[68244]: DEBUG oslo_concurrency.lockutils [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] Acquired lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.010661] env[68244]: DEBUG nova.network.neutron [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Refreshing network info cache for port 688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.075968] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09437} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.076542] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.076772] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.076935] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.289212] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781580, 'name': CreateVM_Task, 'duration_secs': 0.306529} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.289452] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.290095] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.290272] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.290594] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1469.290853] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2056c702-e4f6-453d-b523-abd6c54c3034 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.295480] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1469.295480] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52077444-b480-3a96-d367-b56ea2ad048b" [ 1469.295480] env[68244]: _type = "Task" [ 1469.295480] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.304275] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52077444-b480-3a96-d367-b56ea2ad048b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.692078] env[68244]: DEBUG nova.network.neutron [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Updated VIF entry in instance network info cache for port 688b685f-f503-4ab8-b517-385ace82b8d8. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1469.692444] env[68244]: DEBUG nova.network.neutron [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Updating instance_info_cache with network_info: [{"id": "688b685f-f503-4ab8-b517-385ace82b8d8", "address": "fa:16:3e:0f:8a:4b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap688b685f-f5", "ovs_interfaceid": "688b685f-f503-4ab8-b517-385ace82b8d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.805793] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52077444-b480-3a96-d367-b56ea2ad048b, 'name': SearchDatastore_Task, 'duration_secs': 0.014043} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.806175] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1469.806350] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1469.806586] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.806731] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.806906] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.807178] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1636a64f-ccfa-4f32-ad89-38e6c40079f0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.815122] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.815292] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1469.815945] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e008045-c334-48f9-bd32-b016e19c0836 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.820714] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1469.820714] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cb42d-3cee-3207-b2c3-a27872bc8969" [ 1469.820714] env[68244]: _type = "Task" [ 1469.820714] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.829097] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cb42d-3cee-3207-b2c3-a27872bc8969, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.000491] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1470.027662] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1470.027917] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.028089] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1470.028316] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 
tempest-ServersAaction247Test-275933727-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.028478] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1470.028624] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1470.028835] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1470.028995] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1470.029174] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1470.029333] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1470.029502] env[68244]: DEBUG nova.virt.hardware [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1470.030344] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395cf726-eee0-4283-a602-75fafebca63d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.038305] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8004dc47-12fb-4b62-a38d-f246ab824634 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.051594] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.057082] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Creating folder: Project (effd0485685c471c9eca8f42fad10f78). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.057340] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0be85c2-95b9-4a1d-99b0-9b971a7e2ac2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.067428] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Created folder: Project (effd0485685c471c9eca8f42fad10f78) in parent group-v558876. [ 1470.067601] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Creating folder: Instances. Parent ref: group-v559212. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.067801] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e64eb458-6556-4090-9650-41631549c927 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.076733] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Created folder: Instances in parent group-v559212. [ 1470.076939] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1470.077127] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.077311] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-501d6ec5-5d53-4bdb-956b-d18cfce553da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.095040] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.095040] env[68244]: value = "task-2781583" [ 1470.095040] env[68244]: _type = "Task" [ 1470.095040] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.101818] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781583, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.109199] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1470.109428] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.109584] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1470.109763] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.109906] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1470.110064] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1470.110277] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1470.110434] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1470.110597] env[68244]: DEBUG nova.virt.hardware [None 
req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1470.110758] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1470.110926] env[68244]: DEBUG nova.virt.hardware [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1470.111669] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af023d3-e581-46b6-a13f-8627e8b0e040 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.118199] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d732cf62-e4d9-431c-836b-a12a978520ea {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.133990] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.139486] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1470.139715] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.139919] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22648f48-2f79-4175-953c-30d05fd3d277 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.155532] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.155532] env[68244]: value = "task-2781584" [ 1470.155532] env[68244]: _type = "Task" [ 1470.155532] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.163050] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781584, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.194974] env[68244]: DEBUG oslo_concurrency.lockutils [req-e1b345a8-2026-473e-9914-266d88a2d135 req-5b1931b6-ce4b-4e4b-a25d-574a27862fe8 service nova] Releasing lock "refresh_cache-2d539645-1fd5-4c8d-813b-129677ebb11c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1470.331516] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527cb42d-3cee-3207-b2c3-a27872bc8969, 'name': SearchDatastore_Task, 'duration_secs': 0.008863} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.332273] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c7b0fa3-dc66-4c10-9f9f-55a719cdbacb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.339558] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1470.339558] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bc4660-58da-2019-dfe4-13ca03a00478" [ 1470.339558] env[68244]: _type = "Task" [ 1470.339558] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.347526] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bc4660-58da-2019-dfe4-13ca03a00478, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.604254] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781583, 'name': CreateVM_Task, 'duration_secs': 0.242292} completed successfully. 
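The CreateVM_Task entries above, together with the "progress is N% ... completed successfully" polling around them, follow oslo.vmware's invoke-then-wait pattern. A rough sketch of that pattern, with placeholder credentials and managed object references rather than anything taken from this deployment:

# Sketch only: the folder/config/pool references would come from the property
# collector and spec builder in a real driver.
from oslo_vmware import api

def create_vm(session, vm_folder_ref, config_spec, res_pool_ref):
    """Invoke Folder.CreateVM_Task and block until vCenter reports a result."""
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # wait_for_task() is what emits the periodic "progress is N%" DEBUG lines.
    return session.wait_for_task(task)

# session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)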
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.604416] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1470.604838] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.605035] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1470.605357] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1470.605606] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11821c3a-9b06-4176-8f12-997ea184227e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.610505] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1470.610505] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525decb9-452f-477b-0fd4-281f4512f6b3" [ 1470.610505] env[68244]: _type = "Task" [ 1470.610505] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.617818] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525decb9-452f-477b-0fd4-281f4512f6b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.663475] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781584, 'name': CreateVM_Task, 'duration_secs': 0.225358} completed successfully. 
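The SearchDatastore_Task calls around the image cache above check whether the cached cirros VMDK already exists under devstack-image-cache_base before deciding to fetch or copy it. A hedged sketch of that datastore-browser lookup; the search-spec fields and references are placeholders:

# Sketch only: ds_browser_ref is the datastore's HostDatastoreBrowser moref.
def cached_file_exists(session, ds_browser_ref, folder_path, file_name):
    cf = session.vim.client.factory
    spec = cf.create('ns0:HostDatastoreBrowserSearchSpec')
    spec.matchPattern = [file_name]          # e.g. '9aa0b4d1-....vmdk'
    task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              ds_browser_ref,
                              datastorePath=folder_path,  # '[datastore2] devstack-image-cache_base/...'
                              searchSpec=spec)
    task_info = session.wait_for_task(task)
    # On success the task result lists any files matching the pattern.
    return bool(getattr(task_info.result, 'file', []))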
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.663639] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1470.664050] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.851462] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52bc4660-58da-2019-dfe4-13ca03a00478, 'name': SearchDatastore_Task, 'duration_secs': 0.010022} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.851726] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1470.851954] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2d539645-1fd5-4c8d-813b-129677ebb11c/2d539645-1fd5-4c8d-813b-129677ebb11c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1470.852233] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36ea3d9f-85ab-4dca-9b74-52558aba4c6f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.857951] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1470.857951] env[68244]: value = "task-2781585" [ 1470.857951] env[68244]: _type = "Task" [ 1470.857951] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.865097] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781585, 'name': CopyVirtualDisk_Task} progress is 0%. 
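The "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets around "[datastore2] devstack-image-cache_base/..." come from oslo.concurrency and serialise access to the shared image cache between the concurrent tempest builds. The pattern, in a minimal hedged form (the lock name here is just an example path):

from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore2] devstack-image-cache_base/'
              '9aa0b4d1-af1b-4141-9ca6-95525b722d7e/'
              '9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk')

def with_cached_image(work):
    # lockutils.lock() logs the Acquiring/Acquired/Releasing lines seen above.
    with lockutils.lock(CACHE_VMDK):
        return work()

The same module also offers a decorator form, lockutils.synchronized(name), which is behind the "waited N.NNNs" / "held N.NNNs" bookkeeping visible elsewhere in this log.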
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.123121] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525decb9-452f-477b-0fd4-281f4512f6b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.123444] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1471.123729] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1471.124014] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.124220] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1471.124409] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.124808] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1471.125178] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1471.125461] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98633203-26e0-4d49-8485-eb58976b86a9 
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.127584] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ee8bd89-f1ad-4b1e-be08-afed3ea47e7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.135088] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1471.135088] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4fba-f143-8c4f-dc10-b183271101c8" [ 1471.135088] env[68244]: _type = "Task" [ 1471.135088] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.146171] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4fba-f143-8c4f-dc10-b183271101c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.153132] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.153376] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1471.154238] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa2a7d9d-2b3a-4765-96db-fbfd11dd51ba {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.160579] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1471.160579] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4d0c2-267b-ea31-d795-372fd61296f1" [ 1471.160579] env[68244]: _type = "Task" [ 1471.160579] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.169928] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4d0c2-267b-ea31-d795-372fd61296f1, 'name': SearchDatastore_Task} progress is 0%. 
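Creating the devstack-image-cache_base folder above is a single synchronous FileManager.MakeDirectory call rather than a vCenter task. A sketch, with the datacenter reference as a placeholder; the already-exists handling is an assumption about how such a helper would typically tolerate concurrent creators:

from oslo_vmware import exceptions as vexc

def ensure_cache_folder(session, dc_ref,
                        path='[datastore2] devstack-image-cache_base'):
    file_manager = session.vim.service_content.fileManager
    try:
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name=path, datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        # Another worker won the race; the folder is there either way.
        pass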
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.368040] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430094} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.368298] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 2d539645-1fd5-4c8d-813b-129677ebb11c/2d539645-1fd5-4c8d-813b-129677ebb11c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.368511] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.368756] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8903d8e-6a11-4499-b667-c84548500df8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.395545] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1471.395545] env[68244]: value = "task-2781586" [ 1471.395545] env[68244]: _type = "Task" [ 1471.395545] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.403309] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.646574] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523d4fba-f143-8c4f-dc10-b183271101c8, 'name': SearchDatastore_Task, 'duration_secs': 0.059945} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.646720] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1471.646954] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1471.647197] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.669889] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52a4d0c2-267b-ea31-d795-372fd61296f1, 'name': SearchDatastore_Task, 'duration_secs': 0.049009} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.670638] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f7cece8-bec5-4b60-8a98-00230d8ea2e3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.675822] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1471.675822] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c6fb58-5f25-4400-9817-9a089475ce12" [ 1471.675822] env[68244]: _type = "Task" [ 1471.675822] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.682657] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c6fb58-5f25-4400-9817-9a089475ce12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.905283] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077321} completed successfully. 
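Each build above follows the same disk path: CopyVirtualDisk_Task clones the cached image VMDK into the instance directory, then ExtendVirtualDisk_Task grows the root disk to the flavor's root_gb (1 GiB here, hence the 1048576 KB in the log). A rough sketch of that pair of calls; the datacenter reference and paths are placeholders:

def copy_and_extend_root_disk(session, dc_ref, src_vmdk, dst_vmdk, root_gb):
    disk_mgr = session.vim.service_content.virtualDiskManager

    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_vmdk, sourceDatacenter=dc_ref,
                              destName=dst_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)

    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=dst_vmdk, datacenter=dc_ref,
                              newCapacityKb=root_gb * 1024 * 1024,  # 1 GiB -> 1048576
                              eagerZero=False)
    session.wait_for_task(task)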
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.905582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1471.906276] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218491a8-a0a2-48c2-861b-16f50e5b49b1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.927290] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 2d539645-1fd5-4c8d-813b-129677ebb11c/2d539645-1fd5-4c8d-813b-129677ebb11c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1471.927541] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05aaab39-41ae-49f0-b938-d6c2efd25b7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.947356] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1471.947356] env[68244]: value = "task-2781587" [ 1471.947356] env[68244]: _type = "Task" [ 1471.947356] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.954679] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781587, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.186623] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52c6fb58-5f25-4400-9817-9a089475ce12, 'name': SearchDatastore_Task, 'duration_secs': 0.009167} completed successfully. 
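The ReconfigVM_Task step above attaches the freshly copied VMDK to the VM as its root disk. A very rough sketch of how such a device-change spec is built through the SOAP client factory; the backing class choice, device key, controller and unit numbers are illustrative assumptions, not the driver's real values:

def attach_root_vmdk(session, vm_ref, vmdk_path,
                     controller_key=1000, unit_number=0):
    cf = session.vim.client.factory

    # A "sparse" disk (as logged above) would use the SparseVer2 backing class
    # instead of FlatVer2; flat is shown here purely for illustration.
    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path
    backing.diskMode = 'persistent'

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100            # negative key marks a device to be created
    disk.capacityInKB = 0      # existing VMDK: size comes from the backing file

    change = cf.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)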
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.186929] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1472.187113] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 92b166d2-bc8b-44ba-adf0-3286285c7611/92b166d2-bc8b-44ba-adf0-3286285c7611.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1472.187396] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1472.187583] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.187793] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5a9d0ab-81a0-42c7-affc-883fcdf1678c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.190041] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca3c3d21-b52c-4d51-86d3-79460bcac317 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.199249] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1472.199249] env[68244]: value = "task-2781588" [ 1472.199249] env[68244]: _type = "Task" [ 1472.199249] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.203070] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.203274] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1472.204324] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e1461e-78bb-4865-8936-42fc7f3cf0f3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.209182] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.212267] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1472.212267] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523dc869-c5a1-5d3c-ad76-546622810933" [ 1472.212267] env[68244]: _type = "Task" [ 1472.212267] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.220048] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523dc869-c5a1-5d3c-ad76-546622810933, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.458371] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781587, 'name': ReconfigVM_Task, 'duration_secs': 0.270388} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.458611] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 2d539645-1fd5-4c8d-813b-129677ebb11c/2d539645-1fd5-4c8d-813b-129677ebb11c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1472.459295] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa7fdf19-87b5-4f96-87eb-29bdc56e4133 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.467356] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1472.467356] env[68244]: value = "task-2781589" [ 1472.467356] env[68244]: _type = "Task" [ 1472.467356] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.480316] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781589, 'name': Rename_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.709963] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468026} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.710260] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 92b166d2-bc8b-44ba-adf0-3286285c7611/92b166d2-bc8b-44ba-adf0-3286285c7611.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1472.710485] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1472.710773] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-716e3260-7432-4260-b697-3c75be580540 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.721792] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]523dc869-c5a1-5d3c-ad76-546622810933, 'name': SearchDatastore_Task, 'duration_secs': 0.009309} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.722062] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1472.722062] env[68244]: value = "task-2781590" [ 1472.722062] env[68244]: _type = "Task" [ 1472.722062] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.722791] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5e01a04-ebfb-4716-a623-031d556fedab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.731897] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781590, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.733100] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1472.733100] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235ec48-8402-ba31-7ee6-6c3a8279b02d" [ 1472.733100] env[68244]: _type = "Task" [ 1472.733100] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.740201] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235ec48-8402-ba31-7ee6-6c3a8279b02d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.977571] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781589, 'name': Rename_Task, 'duration_secs': 0.241906} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.977953] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1472.978128] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a93957ff-3032-4bf2-ab0a-9e2a6c3ccad7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.984038] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1472.984038] env[68244]: value = "task-2781591" [ 1472.984038] env[68244]: _type = "Task" [ 1472.984038] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.991161] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.232454] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781590, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063337} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.232672] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1473.233439] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b0dbc3-b3d8-45ac-9d21-54546222e621 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.255411] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] 92b166d2-bc8b-44ba-adf0-3286285c7611/92b166d2-bc8b-44ba-adf0-3286285c7611.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1473.258694] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a51444e9-c024-4307-b4a2-0c86dbf9972b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.272687] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5235ec48-8402-ba31-7ee6-6c3a8279b02d, 'name': SearchDatastore_Task, 'duration_secs': 0.00943} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.272947] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1473.273223] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1473.273789] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2614a8c4-5702-4973-877c-462ca9bdde87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.277944] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1473.277944] env[68244]: value = "task-2781592" [ 1473.277944] env[68244]: _type = "Task" [ 1473.277944] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.281481] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1473.281481] env[68244]: value = "task-2781593" [ 1473.281481] env[68244]: _type = "Task" [ 1473.281481] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.288283] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781592, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.292752] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.495219] env[68244]: DEBUG oslo_vmware.api [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781591, 'name': PowerOnVM_Task, 'duration_secs': 0.445344} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.495509] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1473.495713] env[68244]: INFO nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Took 6.67 seconds to spawn the instance on the hypervisor. [ 1473.495896] env[68244]: DEBUG nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1473.496755] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb56680-2d99-4922-a8c2-5fe2504b7b5f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.790522] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456831} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.793426] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1473.793644] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1473.793935] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781592, 'name': ReconfigVM_Task, 'duration_secs': 0.413522} completed successfully. 
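Power-on above is one more vCenter task, and the "Checking state" entries that follow it are single property-collector reads of runtime.powerState. A hedged sketch of both, with vm_ref as a placeholder managed object reference:

from oslo_vmware import vim_util

def power_on(session, vm_ref):
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

def power_state(session, vm_ref):
    # One RetrievePropertiesEx round-trip, as in the "Checking state" entries;
    # returns 'poweredOn', 'poweredOff' or 'suspended'.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')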
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.794124] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19097bee-2312-406a-b18d-f47d90afdfa3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.795760] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Reconfigured VM instance instance-0000007b to attach disk [datastore2] 92b166d2-bc8b-44ba-adf0-3286285c7611/92b166d2-bc8b-44ba-adf0-3286285c7611.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.796326] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6109a2aa-e389-43fe-ac58-5fce8aa95f05 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.802150] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1473.802150] env[68244]: value = "task-2781595" [ 1473.802150] env[68244]: _type = "Task" [ 1473.802150] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.803255] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1473.803255] env[68244]: value = "task-2781594" [ 1473.803255] env[68244]: _type = "Task" [ 1473.803255] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.814683] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781594, 'name': Rename_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.817057] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.014705] env[68244]: INFO nova.compute.manager [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Took 11.37 seconds to build instance. [ 1474.314909] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067754} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.317771] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1474.318070] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781594, 'name': Rename_Task, 'duration_secs': 0.132817} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.318727] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8dadf3-d2fe-4dde-95e0-3d102d83a22c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.320939] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.321165] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36753fb1-f9b3-49c0-94bb-f69b7976de7e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.340167] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.341475] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-209e2e1a-70b7-496d-ad2d-626fdeed1e7d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.356166] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1474.356166] env[68244]: value = "task-2781596" [ 1474.356166] env[68244]: _type = "Task" [ 1474.356166] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.362851] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1474.362851] env[68244]: value = "task-2781597" [ 1474.362851] env[68244]: _type = "Task" [ 1474.362851] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.370679] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781596, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.373710] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781597, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.516541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-7c84a141-597a-4c76-8c5d-fb76ad9bccfc tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.883s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1474.707481] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "2d539645-1fd5-4c8d-813b-129677ebb11c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1474.707814] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1474.708017] env[68244]: DEBUG nova.compute.manager [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1474.708958] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5a9ab7-365f-46c3-8663-81493516c62f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.716120] env[68244]: DEBUG nova.compute.manager [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68244) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1474.716741] env[68244]: DEBUG nova.objects.instance [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'flavor' on Instance uuid 2d539645-1fd5-4c8d-813b-129677ebb11c {{(pid=68244) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1474.870119] env[68244]: DEBUG oslo_vmware.api [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781596, 'name': PowerOnVM_Task, 'duration_secs': 0.440185} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.870776] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1474.871070] env[68244]: INFO nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Took 4.87 seconds to spawn the instance on the hypervisor. [ 1474.871225] env[68244]: DEBUG nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1474.871979] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99c7f04-1261-40ff-87fe-4c84b86fbe3a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.877059] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781597, 'name': ReconfigVM_Task, 'duration_secs': 0.347538} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.877679] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1/40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1474.878250] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5690e843-35eb-48c6-8a7f-5e89064200a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.887588] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1474.887588] env[68244]: value = "task-2781598" [ 1474.887588] env[68244]: _type = "Task" [ 1474.887588] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.895311] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781598, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.393745] env[68244]: INFO nova.compute.manager [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Took 9.60 seconds to build instance. [ 1475.398727] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781598, 'name': Rename_Task, 'duration_secs': 0.413482} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.398859] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1475.399046] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c45b0394-7c3a-4c1e-93b9-d4cf2f1c6dc8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.405683] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1475.405683] env[68244]: value = "task-2781599" [ 1475.405683] env[68244]: _type = "Task" [ 1475.405683] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.415039] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781599, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.424938] env[68244]: INFO nova.compute.manager [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Rebuilding instance [ 1475.473708] env[68244]: DEBUG nova.compute.manager [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1475.474933] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1f9403-db60-425c-9271-dea1bee84bdb {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.723154] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1475.723800] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fceb1da2-bc89-4cba-8f79-0d98f635f03d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.731455] env[68244]: DEBUG oslo_vmware.api [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1475.731455] env[68244]: value = "task-2781600" [ 1475.731455] env[68244]: _type = "Task" [ 1475.731455] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.739433] env[68244]: DEBUG oslo_vmware.api [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.896122] env[68244]: DEBUG oslo_concurrency.lockutils [None req-353a5b0d-ecda-4940-bec1-d810c7c198bf tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.108s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1475.917094] env[68244]: DEBUG oslo_vmware.api [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781599, 'name': PowerOnVM_Task, 'duration_secs': 0.493881} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.917094] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.917094] env[68244]: DEBUG nova.compute.manager [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1475.917689] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2b8e16-9fc5-41c4-8c87-29d1538798f4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.241190] env[68244]: DEBUG oslo_vmware.api [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781600, 'name': PowerOffVM_Task, 'duration_secs': 0.200151} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.241479] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1476.241676] env[68244]: DEBUG nova.compute.manager [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1476.242461] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f217416-fd9a-4a24-9941-d0f1ffeacfb3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.435566] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.436123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.436123] env[68244]: DEBUG nova.objects.instance [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Trying to apply a migration context that 
does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1476.489056] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1476.489334] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-320e707f-f47d-4b20-ac28-3fcc558dbb28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.502740] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1476.502740] env[68244]: value = "task-2781601" [ 1476.502740] env[68244]: _type = "Task" [ 1476.502740] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.512264] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.599165] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.599625] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.600034] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.600364] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.600645] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 
tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.603166] env[68244]: INFO nova.compute.manager [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Terminating instance [ 1476.753600] env[68244]: DEBUG oslo_concurrency.lockutils [None req-8dbe6182-5c52-4504-91c8-9e7233cc0519 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.769902] env[68244]: DEBUG nova.compute.manager [None req-7d96b351-9fb2-4c9c-8301-7879e3741139 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1476.770777] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6c29ba-b73c-42a2-b0cf-c136305d6ce0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.840123] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "92b166d2-bc8b-44ba-adf0-3286285c7611" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.840397] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.840604] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "92b166d2-bc8b-44ba-adf0-3286285c7611-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.840785] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.840955] env[68244]: DEBUG oslo_concurrency.lockutils [None 
req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.842925] env[68244]: INFO nova.compute.manager [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Terminating instance [ 1477.012674] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781601, 'name': PowerOffVM_Task, 'duration_secs': 0.244756} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.012885] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1477.013630] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.013888] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db21858c-b56a-4e4e-a40e-fec9859716e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.019802] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1477.019802] env[68244]: value = "task-2781602" [ 1477.019802] env[68244]: _type = "Task" [ 1477.019802] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.027074] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781602, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.038500] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "2d539645-1fd5-4c8d-813b-129677ebb11c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.038707] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.038900] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.039105] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.039277] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.041219] env[68244]: INFO nova.compute.manager [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Terminating instance [ 1477.108174] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "refresh_cache-40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.108383] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquired lock "refresh_cache-40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.108565] env[68244]: DEBUG nova.network.neutron [None req-f921bbce-46d4-4224-b48a-8cf315555c10 
tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.280038] env[68244]: INFO nova.compute.manager [None req-7d96b351-9fb2-4c9c-8301-7879e3741139 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] instance snapshotting [ 1477.280693] env[68244]: DEBUG nova.objects.instance [None req-7d96b351-9fb2-4c9c-8301-7879e3741139 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lazy-loading 'flavor' on Instance uuid 92b166d2-bc8b-44ba-adf0-3286285c7611 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1477.346117] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "refresh_cache-92b166d2-bc8b-44ba-adf0-3286285c7611" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.346344] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquired lock "refresh_cache-92b166d2-bc8b-44ba-adf0-3286285c7611" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.346524] env[68244]: DEBUG nova.network.neutron [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.443950] env[68244]: DEBUG oslo_concurrency.lockutils [None req-928c4738-6ef5-448d-91c4-138ec6c21cf9 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.530194] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] VM already powered off {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1477.530380] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1477.530634] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559202', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'name': 'volume-f73eeacc-040a-4905-bf66-efaacecff4fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff690eea-6e5a-42a3-bf85-1b844425df2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'serial': 'f73eeacc-040a-4905-bf66-efaacecff4fe'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1477.531447] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7c8911-f481-4abe-a373-74922f39c55d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.549798] env[68244]: DEBUG nova.compute.manager [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1477.550227] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.551452] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c639605b-f9f5-4607-ab84-536e2c4e3bf6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.555661] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6583d76-38f1-40b7-bcbf-8b66e0462e62 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.563798] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1477.565989] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e91c06d7-fb6f-4343-9864-320f8aa5cee9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.568224] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093c1bc6-cc21-4197-a905-616193ccf648 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.588693] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b6f2ed5e-35b0-492c-bbc4-63184372153a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.605544] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] The volume has not been displaced from its original location: [datastore2] volume-f73eeacc-040a-4905-bf66-efaacecff4fe/volume-f73eeacc-040a-4905-bf66-efaacecff4fe.vmdk. No consolidation needed. {{(pid=68244) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1477.610704] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1477.612961] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abdefd17-14b5-4565-bb29-49abf43f9cad {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.631071] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1477.631071] env[68244]: value = "task-2781604" [ 1477.631071] env[68244]: _type = "Task" [ 1477.631071] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.632058] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.632270] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.632446] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] 2d539645-1fd5-4c8d-813b-129677ebb11c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.635859] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d056976-2f97-49a8-8f3b-693b5b855167 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.642253] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781604, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.643041] env[68244]: DEBUG nova.network.neutron [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1477.645757] env[68244]: DEBUG oslo_vmware.api [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1477.645757] env[68244]: value = "task-2781605" [ 1477.645757] env[68244]: _type = "Task" [ 1477.645757] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.653582] env[68244]: DEBUG oslo_vmware.api [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781605, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.701845] env[68244]: DEBUG nova.network.neutron [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.785930] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617a9351-b052-454a-8bac-20081b986223 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.802620] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1af53ec-7160-4372-bf6e-d5afa1cd2888 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.864850] env[68244]: DEBUG nova.network.neutron [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1477.906563] env[68244]: DEBUG nova.network.neutron [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.142368] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781604, 'name': ReconfigVM_Task, 'duration_secs': 0.200773} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.142657] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1478.147316] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ae51591-8391-46b4-b647-0c6d56e5daf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.164421] env[68244]: DEBUG oslo_vmware.api [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155745} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.165534] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.165721] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.165899] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.166086] env[68244]: INFO nova.compute.manager [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1478.166324] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1478.166565] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1478.166565] env[68244]: value = "task-2781606" [ 1478.166565] env[68244]: _type = "Task" [ 1478.166565] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.166743] env[68244]: DEBUG nova.compute.manager [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1478.166838] env[68244]: DEBUG nova.network.neutron [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.175974] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.205943] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Releasing lock "refresh_cache-40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.206324] env[68244]: DEBUG nova.compute.manager [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1478.206581] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.207541] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a93d38b-21e3-4b14-b670-4cb18c162fe9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.215134] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.215388] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6536828-dee8-482c-b0ce-7374924f2eb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.221470] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1478.221470] env[68244]: value = "task-2781607" [ 1478.221470] env[68244]: _type = "Task" [ 1478.221470] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.231987] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.312593] env[68244]: DEBUG nova.compute.manager [None req-7d96b351-9fb2-4c9c-8301-7879e3741139 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance disappeared during snapshot {{(pid=68244) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1478.409028] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Releasing lock "refresh_cache-92b166d2-bc8b-44ba-adf0-3286285c7611" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.409489] env[68244]: DEBUG nova.compute.manager [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1478.409680] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.410593] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b34b1f3-cdc0-4448-9c5b-4045efc2dcd1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.419503] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.422632] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abb69dcf-ef56-47d0-8a8e-a2ce8a412718 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.433133] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1478.433133] env[68244]: value = "task-2781608" [ 1478.433133] env[68244]: _type = "Task" [ 1478.433133] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.443828] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781608, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.445979] env[68244]: DEBUG nova.compute.manager [req-813c7636-a5f0-476f-a712-4f37744d5ad0 req-fcd4ec3b-7d40-49e5-afcd-468a1cfdc9bf service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Received event network-vif-deleted-688b685f-f503-4ab8-b517-385ace82b8d8 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1478.446343] env[68244]: INFO nova.compute.manager [req-813c7636-a5f0-476f-a712-4f37744d5ad0 req-fcd4ec3b-7d40-49e5-afcd-468a1cfdc9bf service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Neutron deleted interface 688b685f-f503-4ab8-b517-385ace82b8d8; detaching it from the instance and deleting it from the info cache [ 1478.446343] env[68244]: DEBUG nova.network.neutron [req-813c7636-a5f0-476f-a712-4f37744d5ad0 req-fcd4ec3b-7d40-49e5-afcd-468a1cfdc9bf service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.467948] env[68244]: DEBUG nova.compute.manager [None req-7d96b351-9fb2-4c9c-8301-7879e3741139 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Found 0 images (rotation: 2) {{(pid=68244) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1478.678077] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781606, 'name': ReconfigVM_Task, 'duration_secs': 0.107692} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.678265] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559202', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'name': 'volume-f73eeacc-040a-4905-bf66-efaacecff4fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff690eea-6e5a-42a3-bf85-1b844425df2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'f73eeacc-040a-4905-bf66-efaacecff4fe', 'serial': 'f73eeacc-040a-4905-bf66-efaacecff4fe'} {{(pid=68244) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1478.678444] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.679218] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12b7fed-16ae-4920-a1eb-6e79b63b973b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.685545] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.685764] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91e9faf4-b2f7-42ed-ad0f-426c42d264b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.732674] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781607, 'name': PowerOffVM_Task, 'duration_secs': 0.097758} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.732909] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.733092] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.733329] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0664c47-9c9f-4fee-9807-69db3abaa324 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.757399] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.757582] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.757771] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Deleting the datastore file [datastore2] ff690eea-6e5a-42a3-bf85-1b844425df2a {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.758040] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e7c7b73-06c7-462f-93d8-43a59598cd5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.762495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.762495] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.762607] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Deleting the datastore file [datastore2] 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1 {{(pid=68244) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.762747] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18e71143-0cf0-4692-bb3a-1c1e84bbf3db {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.766626] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for the task: (returnval){ [ 1478.766626] env[68244]: value = "task-2781611" [ 1478.766626] env[68244]: _type = "Task" [ 1478.766626] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.770497] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for the task: (returnval){ [ 1478.770497] env[68244]: value = "task-2781612" [ 1478.770497] env[68244]: _type = "Task" [ 1478.770497] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.776619] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781611, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.781298] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781612, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.925285] env[68244]: DEBUG nova.network.neutron [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.944045] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781608, 'name': PowerOffVM_Task, 'duration_secs': 0.161646} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.944518] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.944518] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.944709] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c76daebf-8232-4be9-b394-11802b1c8caf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.948921] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af28b409-e59d-42ff-88f2-2dab7a04d79e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.957401] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13ff701-2494-4aef-b5ba-b6a62bac7b20 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.969441] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.969647] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.969893] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Deleting the datastore file [datastore2] 92b166d2-bc8b-44ba-adf0-3286285c7611 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.970330] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3fc8a20-628c-4f6c-9f4a-242870ebf14a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.975823] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for the task: (returnval){ [ 1478.975823] env[68244]: value = "task-2781614" [ 1478.975823] env[68244]: _type = "Task" [ 1478.975823] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.984409] env[68244]: DEBUG nova.compute.manager [req-813c7636-a5f0-476f-a712-4f37744d5ad0 req-fcd4ec3b-7d40-49e5-afcd-468a1cfdc9bf service nova] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Detach interface failed, port_id=688b685f-f503-4ab8-b517-385ace82b8d8, reason: Instance 2d539645-1fd5-4c8d-813b-129677ebb11c could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1478.987358] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.277487] env[68244]: DEBUG oslo_vmware.api [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Task: {'id': task-2781611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089592} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.278094] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.278331] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.278594] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.283151] env[68244]: DEBUG oslo_vmware.api [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Task: {'id': task-2781612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099233} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.283151] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.283311] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.283470] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.283794] env[68244]: INFO nova.compute.manager [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1479.283863] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1479.286017] env[68244]: DEBUG nova.compute.manager [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1479.286127] env[68244]: DEBUG nova.network.neutron [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.302839] env[68244]: DEBUG nova.network.neutron [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1479.339295] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Volume detach. 
Driver type: vmdk {{(pid=68244) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1479.339666] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c9842f8-1113-401f-97a0-e02968d5984c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.349869] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e530917-2094-4b83-91c3-80648da173b7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.378898] env[68244]: ERROR nova.compute.manager [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Failed to detach volume f73eeacc-040a-4905-bf66-efaacecff4fe from /dev/sda: nova.exception.InstanceNotFound: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Traceback (most recent call last): [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self.driver.rebuild(**kwargs) [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise NotImplementedError() [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] NotImplementedError [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] During handling of the above exception, another exception occurred: [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Traceback (most recent call last): [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self.driver.detach_volume(context, old_connection_info, [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] return self._volumeops.detach_volume(connection_info, instance) [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._detach_volume_vmdk(connection_info, instance) [ 1479.378898] env[68244]: ERROR nova.compute.manager 
[instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] stable_ref.fetch_moref(session) [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] nova.exception.InstanceNotFound: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. [ 1479.378898] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.428643] env[68244]: INFO nova.compute.manager [-] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Took 1.26 seconds to deallocate network for instance. [ 1479.486274] env[68244]: DEBUG oslo_vmware.api [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Task: {'id': task-2781614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089407} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.488627] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.488820] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.488998] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.489193] env[68244]: INFO nova.compute.manager [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1479.489424] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1479.489796] env[68244]: DEBUG nova.compute.manager [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1479.489894] env[68244]: DEBUG nova.network.neutron [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.504778] env[68244]: DEBUG nova.network.neutron [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1479.523419] env[68244]: DEBUG nova.compute.utils [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Build of instance ff690eea-6e5a-42a3-bf85-1b844425df2a aborted: Failed to rebuild volume backed instance. {{(pid=68244) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1479.525928] env[68244]: ERROR nova.compute.manager [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance ff690eea-6e5a-42a3-bf85-1b844425df2a aborted: Failed to rebuild volume backed instance. [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Traceback (most recent call last): [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self.driver.rebuild(**kwargs) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise NotImplementedError() [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] NotImplementedError [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] During handling of the above exception, another exception occurred: [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Traceback (most recent call last): [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._detach_root_volume(context, instance, root_bdm) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File 
"/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] with excutils.save_and_reraise_exception(): [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self.force_reraise() [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise self.value [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self.driver.detach_volume(context, old_connection_info, [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] return self._volumeops.detach_volume(connection_info, instance) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._detach_volume_vmdk(connection_info, instance) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] stable_ref.fetch_moref(session) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] nova.exception.InstanceNotFound: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. 
[ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] During handling of the above exception, another exception occurred: [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Traceback (most recent call last): [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 11390, in _error_out_instance_on_exception [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] yield [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1479.525928] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._do_rebuild_instance_with_claim( [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._do_rebuild_instance( [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._rebuild_default_impl(**kwargs) [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] self._rebuild_volume_backed_instance( [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] raise exception.BuildAbortException( [ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] nova.exception.BuildAbortException: Build of instance ff690eea-6e5a-42a3-bf85-1b844425df2a aborted: Failed to rebuild volume backed instance. 
[ 1479.527296] env[68244]: ERROR nova.compute.manager [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] [ 1479.806567] env[68244]: DEBUG nova.network.neutron [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.934626] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1479.934920] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1479.935199] env[68244]: DEBUG nova.objects.instance [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'resources' on Instance uuid 2d539645-1fd5-4c8d-813b-129677ebb11c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.007384] env[68244]: DEBUG nova.network.neutron [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.309172] env[68244]: INFO nova.compute.manager [-] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Took 1.02 seconds to deallocate network for instance. [ 1480.481535] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f962de31-76e0-4422-87f8-4bedfe7d017c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.488783] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f330de-8960-4baf-b845-cf9d83092de6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.518797] env[68244]: INFO nova.compute.manager [-] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Took 1.03 seconds to deallocate network for instance. 
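Editor's aside, not part of the captured log: the repeated "Acquiring lock 'compute_resources'" / "waited 0.000s" / "held N.NNNs" lines above come from oslo.concurrency serializing every resource-tracker update behind one named lock. A minimal sketch of that pattern, using a hypothetical update function rather than Nova's actual resource tracker code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage(tracker, instance):
        # Only one request at a time may mutate the tracked usage. Time spent
        # blocked waiting for this lock is what the log reports as "waited",
        # and time spent inside the function is reported as "held".
        tracker.remove_instance_usage(instance)  # hypothetical helper
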
[ 1480.520917] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861c119a-5508-4be7-83fd-75e1805f2e92 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.530011] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8552ad-00c2-4ec7-a2a7-6b509b8153da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.544403] env[68244]: DEBUG nova.compute.provider_tree [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.814946] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.027399] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.047676] env[68244]: DEBUG nova.scheduler.client.report [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1481.540772] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.552675] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1481.555007] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.740s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.555247] env[68244]: DEBUG nova.objects.instance [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lazy-loading 'resources' on Instance uuid 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.570113] env[68244]: INFO nova.scheduler.client.report [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance 2d539645-1fd5-4c8d-813b-129677ebb11c [ 1482.076388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-bb967401-b8c0-4192-b3ff-d04e7105053c tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "2d539645-1fd5-4c8d-813b-129677ebb11c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.037s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.093801] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bee4f66-a7e1-4c17-ba1d-791a72e12029 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.101310] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d43903f-3247-4d4b-a872-5f7a9bd8c769 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.131885] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66593a7-ccb9-4d20-9225-8e39e86d583d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.138780] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded48786-16e4-4c52-bf8e-2175576332a9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.152069] env[68244]: DEBUG nova.compute.provider_tree [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.423065] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.423331] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.423541] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.423725] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.423896] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.426441] env[68244]: INFO nova.compute.manager [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Terminating instance [ 1482.655467] env[68244]: DEBUG nova.scheduler.client.report [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1482.858129] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.858366] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.929877] env[68244]: DEBUG nova.compute.manager [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1482.930273] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd7cbc14-fcab-452a-874e-750443fcc97a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.939934] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddbc6cb-253b-45f1-b2a2-ab44b7ffaabc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.964254] env[68244]: WARNING nova.virt.vmwareapi.driver [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. [ 1482.964588] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1482.965064] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5da9248-e613-4973-89c5-e6c94de23932 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.975876] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ace8a7c-f9c7-42a3-87c1-1de58a33774f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.004979] env[68244]: WARNING nova.virt.vmwareapi.vmops [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. [ 1483.005224] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1483.005403] env[68244]: INFO nova.compute.manager [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Took 0.08 seconds to destroy the instance on the hypervisor. 
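Editor's aside, not part of the captured log: the warnings just above ("Instance does not exists. Proceeding to delete instance properties on datastore", "Instance does not exist on backend") show the driver tolerating a VM that vCenter no longer knows about and still reporting the destroy as successful. A rough sketch of that tolerance, with assumed helper names rather than the vmwareapi driver's real API:

    class InstanceNotFound(Exception):
        """Raised when no VM matches the instance UUID (assumed local type)."""

    def find_vm_ref(session, instance_uuid):
        # Stand-in for the SearchIndex.FindAllByUuid lookup seen in the log.
        refs = session.find_all_by_uuid(instance_uuid)  # assumed session helper
        if not refs:
            raise InstanceNotFound(instance_uuid)
        return refs[0]

    def destroy(session, instance_uuid, log):
        try:
            vm_ref = find_vm_ref(session, instance_uuid)
        except InstanceNotFound:
            # Already gone on the backend: warn and treat the destroy as done
            # so the API-side delete can still complete.
            log.warning("Instance %s not found on backend; continuing delete",
                        instance_uuid)
            return
        session.unregister_and_delete(vm_ref)  # assumed helper
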
[ 1483.005646] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1483.005858] env[68244]: DEBUG nova.compute.manager [-] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1483.005950] env[68244]: DEBUG nova.network.neutron [-] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1483.160704] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.163062] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.136s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1483.163310] env[68244]: DEBUG nova.objects.instance [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lazy-loading 'resources' on Instance uuid 92b166d2-bc8b-44ba-adf0-3286285c7611 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1483.185559] env[68244]: INFO nova.scheduler.client.report [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Deleted allocations for instance 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1 [ 1483.360779] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Starting instance... 
{{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1483.635323] env[68244]: DEBUG nova.compute.manager [req-e46a4817-88f6-44d5-acb0-d18c2c4f7cad req-bd1f05a9-9d77-4094-bc65-f12354011f8e service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Received event network-vif-deleted-b82deb5a-5d45-48f8-977f-9e51a4ee39c9 {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1483.635514] env[68244]: INFO nova.compute.manager [req-e46a4817-88f6-44d5-acb0-d18c2c4f7cad req-bd1f05a9-9d77-4094-bc65-f12354011f8e service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Neutron deleted interface b82deb5a-5d45-48f8-977f-9e51a4ee39c9; detaching it from the instance and deleting it from the info cache [ 1483.635682] env[68244]: DEBUG nova.network.neutron [req-e46a4817-88f6-44d5-acb0-d18c2c4f7cad req-bd1f05a9-9d77-4094-bc65-f12354011f8e service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.696014] env[68244]: DEBUG oslo_concurrency.lockutils [None req-f921bbce-46d4-4224-b48a-8cf315555c10 tempest-ServerShowV257Test-2144843012 tempest-ServerShowV257Test-2144843012-project-member] Lock "40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.096s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.716163] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22afcc2-c47a-4a10-a341-b7bc14117777 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.725580] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0922d81-45c8-4113-acdb-918928b56971 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.760825] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859a7757-4c5c-4913-be47-9db16d1fc172 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.767711] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbf43f6-7fb5-437b-b45b-1775e9d2236a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.783846] env[68244]: DEBUG nova.compute.provider_tree [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.882689] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1484.116337] env[68244]: DEBUG nova.network.neutron [-] [instance: 
ff690eea-6e5a-42a3-bf85-1b844425df2a] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.140811] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eafa4b00-3761-4347-8c51-a132c320a9fc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.151239] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5a3a56-c5af-4309-b1c3-14cc02fd210b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.176463] env[68244]: DEBUG nova.compute.manager [req-e46a4817-88f6-44d5-acb0-d18c2c4f7cad req-bd1f05a9-9d77-4094-bc65-f12354011f8e service nova] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Detach interface failed, port_id=b82deb5a-5d45-48f8-977f-9e51a4ee39c9, reason: Instance ff690eea-6e5a-42a3-bf85-1b844425df2a could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1484.287768] env[68244]: DEBUG nova.scheduler.client.report [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1484.619008] env[68244]: INFO nova.compute.manager [-] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Took 1.61 seconds to deallocate network for instance. 
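Editor's aside, not part of the captured log: the recurring "Inventory has not changed for provider ... based on inventory data: {...}" lines report the same three resource classes each time. Deciding whether placement needs an update reduces, in effect, to comparing the reported inventory dict against the cached one; a small self-contained sketch using the exact values from this log (a simplification, not the scheduler report client itself):

    REPORTED = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 176,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    def inventory_changed(cached, reported):
        # Equal dicts mean the periodic update can be skipped, which is why the
        # log keeps printing "Inventory has not changed".
        return cached != reported

    assert inventory_changed(dict(REPORTED), REPORTED) is False
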
[ 1484.793103] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.796187] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.255s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1484.817790] env[68244]: INFO nova.scheduler.client.report [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Deleted allocations for instance 92b166d2-bc8b-44ba-adf0-3286285c7611 [ 1484.831641] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e3339c-5c38-463f-833e-6691322acd1e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.839962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8236f529-256a-4aef-aec2-23c0992a0197 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.871988] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a1c103-dea7-439b-8ff3-2243ba36c42c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.879816] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bb75e4-384d-4aa8-94c4-f26fadea3c2a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.893121] env[68244]: DEBUG nova.compute.provider_tree [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.163084] env[68244]: INFO nova.compute.manager [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Took 0.54 seconds to detach 1 volumes for instance. 
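Editor's aside, not part of the captured log: throughout this trace, datastore and VM operations are driven by the "Waiting for the task ... progress is 0% ... completed successfully" pattern. A generic polling sketch of that idea, with an assumed task-info shape (this is not oslo.vmware's wait_for_task implementation):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it succeeds or errors out."""
        while True:
            info = get_task_info()            # assumed: returns a small dict
            state = info.get("state")
            if state == "success":
                return info.get("result")
            if state == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # Still queued or running: report progress and poll again.
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)
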
[ 1485.165537] env[68244]: DEBUG nova.compute.manager [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Deleting volume: f73eeacc-040a-4905-bf66-efaacecff4fe {{(pid=68244) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1485.326523] env[68244]: DEBUG oslo_concurrency.lockutils [None req-568b7b2b-6fc6-4369-b25c-6746cac9cbb8 tempest-ServersAaction247Test-275933727 tempest-ServersAaction247Test-275933727-project-member] Lock "92b166d2-bc8b-44ba-adf0-3286285c7611" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.486s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1485.396340] env[68244]: DEBUG nova.scheduler.client.report [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1485.717644] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.902839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.107s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1485.902839] env[68244]: INFO nova.compute.manager [None req-9e2ce6ea-ac57-449b-bef8-11f9a4a53590 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Successfully reverted task state from rebuilding on failure for instance. 
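Editor's aside, not part of the captured log: the long tracebacks earlier in this section reduce to one control flow: the driver's rebuild() raises NotImplementedError, the default rebuild path then tries to detach the root volume of the volume-backed instance, that detach fails with InstanceNotFound because the backing VM is already gone, and the rebuild is aborted with the task state reverted ("Successfully reverted task state from rebuilding" above). A simplified sketch of that chain, with assumed names and signatures rather than Nova's real ones:

    class BuildAbortException(Exception):
        """Stand-in for nova.exception.BuildAbortException."""

    def rebuild_instance(driver, instance_uuid):
        try:
            driver.rebuild(instance_uuid)      # the vmwareapi driver raises NotImplementedError
        except NotImplementedError:
            try:
                # Default path for a volume-backed instance: detach the root
                # volume first. In this log that step fails with InstanceNotFound.
                driver.detach_root_volume(instance_uuid)   # assumed simplified call
            except Exception as exc:
                raise BuildAbortException(
                    "Build of instance %s aborted: Failed to rebuild volume "
                    "backed instance." % instance_uuid) from exc
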
[ 1485.908504] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.026s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.910008] env[68244]: INFO nova.compute.claims [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.952131] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450b7562-efc7-41c1-a6f0-657a07fe197f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.958646] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a01d62f-0e9b-4e75-844f-10046eff1459 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.989087] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7756e824-fc61-4959-bb4f-ddba85ada22a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.996137] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5624c0d9-ccea-4bdc-8244-adf108b06510 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.009215] env[68244]: DEBUG nova.compute.provider_tree [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.513030] env[68244]: DEBUG nova.scheduler.client.report [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1488.017933] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1488.018545] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1488.021435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.304s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1488.021647] env[68244]: DEBUG nova.objects.instance [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lazy-loading 'resources' on Instance uuid ff690eea-6e5a-42a3-bf85-1b844425df2a {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.527817] env[68244]: DEBUG nova.compute.utils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1488.529356] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1488.529521] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1488.555786] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef506b8-7718-4c35-b390-e4eebc729f30 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.563595] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fe99db-00a0-4072-aeb5-ea18334b247a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.593635] env[68244]: DEBUG nova.policy [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1488.595534] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e883a6b7-df22-4a27-8e4e-3bc89dab518a {{(pid=68244) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.602535] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19719c2d-59f3-46e2-bd4e-82ed01cdf904 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.616936] env[68244]: DEBUG nova.compute.provider_tree [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.862215] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Successfully created port: da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1489.032440] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1489.121024] env[68244]: DEBUG nova.scheduler.client.report [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.625561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.604s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.042248] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1490.074390] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1490.074636] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.074791] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1490.074968] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.075127] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1490.075273] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1490.075480] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1490.075634] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1490.075796] env[68244]: DEBUG nova.virt.hardware [None 
req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1490.075954] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1490.076134] env[68244]: DEBUG nova.virt.hardware [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1490.077031] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc341a51-8812-4264-9926-da43e4cc54fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.085146] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681b3b89-3b6c-4928-a74e-19d0a429c96b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.142577] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5546923f-d780-4843-b698-cc12b57e8d33 tempest-ServerActionsV293TestJSON-25790882 tempest-ServerActionsV293TestJSON-25790882-project-member] Lock "ff690eea-6e5a-42a3-bf85-1b844425df2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.719s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.211857] env[68244]: DEBUG nova.compute.manager [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Received event network-vif-plugged-da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1490.212333] env[68244]: DEBUG oslo_concurrency.lockutils [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] Acquiring lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1490.212588] env[68244]: DEBUG oslo_concurrency.lockutils [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1490.212795] env[68244]: DEBUG oslo_concurrency.lockutils [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
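
The "compute_resources" claim and usage-update entries above, with their 'acquired by ... waited Ns' / '"released" ... held Ns' pairs, come from oslo.concurrency's synchronized-lock wrapper (the "inner" frames in lockutils.py). Below is a minimal, hedged sketch of that pattern, not Nova's actual ResourceTracker code; the flavor numbers simply mirror the m1.nano flavor used in this run (1 vCPU, 192 MB RAM, 1 GB root disk).

from oslo_concurrency import lockutils

# Same lock name as in the log lines above.
COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"


@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def instance_claim(flavor):
    """Reserve resources for one instance while holding the shared lock.

    Illustrative only: the real ResourceTracker also updates Placement and
    persists the claim. Entering and leaving this decorated function is what
    produces the 'Lock "compute_resources" acquired/"released"' DEBUG lines,
    with lockutils timing how long the caller waited for and then held it.
    """
    return {
        "VCPU": flavor["vcpus"],           # 1 for m1.nano
        "MEMORY_MB": flavor["memory_mb"],  # 192 for m1.nano
        "DISK_GB": flavor["root_gb"],      # 1 for m1.nano
    }


claim = instance_claim({"vcpus": 1, "memory_mb": 192, "root_gb": 1})

The returned dict matches the resource keys reported later in this trace as the instance's Placement allocation ({'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}).
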
[ 1490.212943] env[68244]: DEBUG nova.compute.manager [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] No waiting events found dispatching network-vif-plugged-da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1490.213136] env[68244]: WARNING nova.compute.manager [req-610ed8b8-0582-49a0-aada-818ce5f89109 req-bcddd53c-50f5-4b69-99be-92980a1d5d14 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Received unexpected event network-vif-plugged-da91d9a0-05ed-4bef-b682-76e0ca92f6ed for instance with vm_state building and task_state spawning. [ 1490.318099] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Successfully updated port: da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1490.793753] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.793921] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1490.794136] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1491.325566] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1491.438748] env[68244]: DEBUG nova.network.neutron [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Updating instance_info_cache with network_info: [{"id": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "address": "fa:16:3e:e6:be:cd", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda91d9a0-05", "ovs_interfaceid": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.941385] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1491.941711] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Instance network_info: |[{"id": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "address": "fa:16:3e:e6:be:cd", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda91d9a0-05", "ovs_interfaceid": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1491.942168] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:be:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da91d9a0-05ed-4bef-b682-76e0ca92f6ed', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1491.949746] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1491.949960] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1491.950212] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73b357e8-f768-4eb6-ad63-5cf051d9459b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.971501] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1491.971501] env[68244]: value = "task-2781616" [ 1491.971501] env[68244]: _type = "Task" [ 1491.971501] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.978641] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781616, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.236805] env[68244]: DEBUG nova.compute.manager [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Received event network-changed-da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1492.236994] env[68244]: DEBUG nova.compute.manager [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Refreshing instance network info cache due to event network-changed-da91d9a0-05ed-4bef-b682-76e0ca92f6ed. 
{{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1492.237226] env[68244]: DEBUG oslo_concurrency.lockutils [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] Acquiring lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.237368] env[68244]: DEBUG oslo_concurrency.lockutils [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] Acquired lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.237530] env[68244]: DEBUG nova.network.neutron [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Refreshing network info cache for port da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.481094] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781616, 'name': CreateVM_Task, 'duration_secs': 0.418985} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.481402] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.481895] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.482083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.482409] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1492.482660] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0bfdc68-a3ab-4ec7-9ad5-8dfaff45d971 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.486908] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1492.486908] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525051e0-8d1a-6ba4-c3b9-6031bc07f119" [ 1492.486908] env[68244]: _type = "Task" [ 1492.486908] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.493953] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525051e0-8d1a-6ba4-c3b9-6031bc07f119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.909609] env[68244]: DEBUG nova.network.neutron [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Updated VIF entry in instance network info cache for port da91d9a0-05ed-4bef-b682-76e0ca92f6ed. {{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.909958] env[68244]: DEBUG nova.network.neutron [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Updating instance_info_cache with network_info: [{"id": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "address": "fa:16:3e:e6:be:cd", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda91d9a0-05", "ovs_interfaceid": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.997094] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525051e0-8d1a-6ba4-c3b9-6031bc07f119, 'name': SearchDatastore_Task, 'duration_secs': 0.013857} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.997388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1492.997608] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1492.997839] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.997985] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.998180] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.998431] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1e49899-9154-41d8-a1a5-8588841c11c5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.006769] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1493.006982] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1493.007688] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f79f850-9d37-49e6-97ad-c65afac3710a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.012162] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1493.012162] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527f3418-be22-e301-bac6-e833200fe43e" [ 1493.012162] env[68244]: _type = "Task" [ 1493.012162] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.019210] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527f3418-be22-e301-bac6-e833200fe43e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.077235] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.412805] env[68244]: DEBUG oslo_concurrency.lockutils [req-e694746d-285b-4f2d-bdfc-422cddd9db9d req-c2d118b6-2117-47b5-853d-0b91ca3e7512 service nova] Releasing lock "refresh_cache-e2f0d059-6415-4a90-98f7-99fe67e8d81c" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1493.522900] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]527f3418-be22-e301-bac6-e833200fe43e, 'name': SearchDatastore_Task, 'duration_secs': 0.007954} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.523678] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e30cae1a-d43f-4ad3-b8f2-07a714553331 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.529311] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1493.529311] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52755c33-0afe-e267-25a7-00796194056f" [ 1493.529311] env[68244]: _type = "Task" [ 1493.529311] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.536616] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52755c33-0afe-e267-25a7-00796194056f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.039269] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52755c33-0afe-e267-25a7-00796194056f, 'name': SearchDatastore_Task, 'duration_secs': 0.009667} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.039550] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1494.039802] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2f0d059-6415-4a90-98f7-99fe67e8d81c/e2f0d059-6415-4a90-98f7-99fe67e8d81c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1494.040067] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0cfdbe8-21be-4114-aa9b-2186b9e5d6a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.047219] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1494.047219] env[68244]: value = "task-2781617" [ 1494.047219] env[68244]: _type = "Task" [ 1494.047219] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.054259] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.556932] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434954} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.557319] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] e2f0d059-6415-4a90-98f7-99fe67e8d81c/e2f0d059-6415-4a90-98f7-99fe67e8d81c.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1494.557529] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1494.557827] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26f078e3-7ade-4ffc-9b05-f5781e7d56c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.566225] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1494.566225] env[68244]: value = "task-2781618" [ 1494.566225] env[68244]: _type = "Task" [ 1494.566225] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.573855] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781618, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.075826] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06097} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.076103] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1495.076851] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cb0091-f899-49ad-b6c9-55f36ad4e0e2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.097700] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] e2f0d059-6415-4a90-98f7-99fe67e8d81c/e2f0d059-6415-4a90-98f7-99fe67e8d81c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.097933] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8e345d9-eb05-4e32-8801-460a4146b4c4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.116972] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1495.116972] env[68244]: value = "task-2781619" [ 1495.116972] env[68244]: _type = "Task" [ 1495.116972] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.123961] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781619, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.626787] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781619, 'name': ReconfigVM_Task, 'duration_secs': 0.288423} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.627099] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Reconfigured VM instance instance-0000007c to attach disk [datastore2] e2f0d059-6415-4a90-98f7-99fe67e8d81c/e2f0d059-6415-4a90-98f7-99fe67e8d81c.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1495.627680] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f1311e2-fd3d-4407-b2b6-f30af41d0c64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.633686] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1495.633686] env[68244]: value = "task-2781620" [ 1495.633686] env[68244]: _type = "Task" [ 1495.633686] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.640766] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781620, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.145467] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781620, 'name': Rename_Task, 'duration_secs': 0.142775} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.145881] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1496.146229] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-965c2ccd-6836-42a9-a3c0-0de4c291e647 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.151916] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1496.151916] env[68244]: value = "task-2781621" [ 1496.151916] env[68244]: _type = "Task" [ 1496.151916] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.163208] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781621, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.661539] env[68244]: DEBUG oslo_vmware.api [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781621, 'name': PowerOnVM_Task, 'duration_secs': 0.450442} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.661837] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.662047] env[68244]: INFO nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1496.662262] env[68244]: DEBUG nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1496.663051] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df94a2e-b07a-4dee-a9ac-deadda034e5c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.077656] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1497.178531] env[68244]: INFO nova.compute.manager [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Took 13.31 seconds to build instance. 
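
Every VM-side step of this spawn (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same oslo.vmware pattern: invoke a *_Task method against vCenter, then block in wait_for_task(), which emits the "Waiting for the task" and "progress is N%" polling lines seen above. A hedged sketch of that pattern follows; the hostname, credentials and the power_on() helper are placeholders for illustration, not values or code from this trace.

from oslo_vmware import api as vmware_api


def power_on(session, vm_ref):
    """Invoke PowerOnVM_Task on a VM managed-object reference and wait.

    wait_for_task() polls the task (the 'progress is N%' lines) and returns
    the task info once vCenter reports success, or raises on error.
    """
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task)


if __name__ == "__main__":
    # Placeholder endpoint and credentials; a real run needs a reachable vCenter.
    session = vmware_api.VMwareAPISession(
        "vcenter.example.org", "admin", "secret",
        api_retry_count=10, task_poll_interval=0.5)
    # vm_ref would come from a PropertyCollector query such as the
    # RetrievePropertiesEx calls seen throughout this trace; with one in hand:
    # power_on(session, vm_ref)
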
[ 1497.681146] env[68244]: DEBUG oslo_concurrency.lockutils [None req-90ef4798-1126-400d-ac80-8e3b1fb14347 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.823s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1498.078233] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1498.078447] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances with incomplete migration {{(pid=68244) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1498.335010] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce18fc43-461b-497e-b99c-2353cad44e58 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.343174] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Suspending the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1498.343407] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a553ad9c-3d6f-4c1c-ad87-3126f4519863 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.350385] env[68244]: DEBUG oslo_vmware.api [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1498.350385] env[68244]: value = "task-2781622" [ 1498.350385] env[68244]: _type = "Task" [ 1498.350385] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.357718] env[68244]: DEBUG oslo_vmware.api [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781622, 'name': SuspendVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.581243] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1498.860780] env[68244]: DEBUG oslo_vmware.api [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781622, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.361152] env[68244]: DEBUG oslo_vmware.api [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781622, 'name': SuspendVM_Task, 'duration_secs': 0.59408} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.361411] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Suspended the VM {{(pid=68244) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1499.361590] env[68244]: DEBUG nova.compute.manager [None req-ed0dee69-9f1a-4375-ade9-56d716498e49 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1499.362340] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d70faf7-084c-42bc-941f-0b147a6fcef6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.083985] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1500.587675] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.587888] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.588078] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.588243] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1500.589152] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9530778a-e649-4189-a15e-47bd39685e4b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.597923] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18550616-8234-45e7-9ac9-33780d5eceb7 
{{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.611247] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f5323a-49ea-4947-9255-e2dad7224aca {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.617397] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2712ac07-67bf-40dd-8334-c58f84e24a66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.647453] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180890MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1500.647589] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.647790] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.680018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.680260] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.680460] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.680639] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1500.680805] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.682648] env[68244]: INFO nova.compute.manager [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Terminating instance [ 1501.186577] env[68244]: DEBUG nova.compute.manager [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1501.186904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1501.188038] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7532740-bd91-472d-aaea-6096515487cd {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.195423] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1501.195646] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0faf6f9b-57de-47cd-985b-b8a46f96053d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.273078] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1501.273321] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1501.273502] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] e2f0d059-6415-4a90-98f7-99fe67e8d81c {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1501.273765] env[68244]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-385107cf-658c-4475-b13b-978783e30434 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.280224] env[68244]: DEBUG oslo_vmware.api [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1501.280224] env[68244]: value = "task-2781624" [ 1501.280224] env[68244]: _type = "Task" [ 1501.280224] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.287875] env[68244]: DEBUG oslo_vmware.api [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781624, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.670085] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance e2f0d059-6415-4a90-98f7-99fe67e8d81c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1501.670305] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1501.670447] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1501.694667] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eae3e5-78ec-4338-a1d2-8729d21d65a1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.701786] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b7b6f5-7328-45a3-a6a7-6d0d11c8eb66 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.731509] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e785bb7-8a3b-44cb-8de2-22102ea29bd8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.738182] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c6363f-e0e9-4801-80cb-1bb1c7042236 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.750987] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.789204] env[68244]: DEBUG oslo_vmware.api [None 
req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184447} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.789436] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1501.789617] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1501.789790] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1501.789973] env[68244]: INFO nova.compute.manager [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1501.790228] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
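Records like 'Waiting for the task: (returnval){ value = "task-2781624" ... } to complete', followed by 'progress is 0%' and then 'completed successfully', show the generic submit-then-poll pattern for vCenter tasks (here the DeleteDatastoreFile_Task finished in about 0.18 s). The sketch below illustrates that polling loop; TaskInfo and get_task_info are hypothetical stand-ins for the vSphere task query, not the oslo.vmware implementation.

import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str              # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0       # percent complete
    error: Optional[str] = None


def wait_for_task(task_id: str,
                  get_task_info: Callable[[str], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a submitted task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Two polls: still running at 0%, then done -- the shape of task-2781624 above.
    replies = iter([TaskInfo("running", 0), TaskInfo("success", 100)])
    print(wait_for_task("task-2781624", lambda _id: next(replies), poll_interval=0.01))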
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1501.790415] env[68244]: DEBUG nova.compute.manager [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1501.790509] env[68244]: DEBUG nova.network.neutron [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1502.061951] env[68244]: DEBUG nova.compute.manager [req-b2d4ae43-8ffe-4d88-a4c1-46233ea9e17a req-d865bb94-0703-430e-a156-0face7fa5a46 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Received event network-vif-deleted-da91d9a0-05ed-4bef-b682-76e0ca92f6ed {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1502.062181] env[68244]: INFO nova.compute.manager [req-b2d4ae43-8ffe-4d88-a4c1-46233ea9e17a req-d865bb94-0703-430e-a156-0face7fa5a46 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Neutron deleted interface da91d9a0-05ed-4bef-b682-76e0ca92f6ed; detaching it from the instance and deleting it from the info cache [ 1502.062357] env[68244]: DEBUG nova.network.neutron [req-b2d4ae43-8ffe-4d88-a4c1-46233ea9e17a req-d865bb94-0703-430e-a156-0face7fa5a46 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.254013] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1502.543874] env[68244]: DEBUG nova.network.neutron [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.565370] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-276f80c9-72b1-4d7e-a109-4324314dca94 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.575390] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf63604d-d9cb-4fd8-879a-ad566572cef4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.599264] env[68244]: DEBUG nova.compute.manager [req-b2d4ae43-8ffe-4d88-a4c1-46233ea9e17a req-d865bb94-0703-430e-a156-0face7fa5a46 service nova] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Detach interface failed, port_id=da91d9a0-05ed-4bef-b682-76e0ca92f6ed, reason: Instance e2f0d059-6415-4a90-98f7-99fe67e8d81c could not be found. 
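The network-vif-deleted-da91d9a0-... event above causes the port to be dropped from the instance's cached network_info, ending with an empty cache and a 'Detach interface failed ... could not be found' note because the instance itself is already gone. A small sketch of that cache-pruning step over plain dicts; remove_vif_from_cache is an illustrative helper, not a Nova API.

def remove_vif_from_cache(network_info, port_id):
    """Return the cached VIF list without the deleted port."""
    remaining = [vif for vif in network_info if vif.get("id") != port_id]
    if len(remaining) == len(network_info):
        # Mirrors the "Detach interface failed ... could not be found" case:
        # the port was not in the cache (or the instance is already deleted).
        print(f"port {port_id} not found in instance info cache")
    return remaining


cache = [{"id": "da91d9a0-05ed-4bef-b682-76e0ca92f6ed"}]
cache = remove_vif_from_cache(cache, "da91d9a0-05ed-4bef-b682-76e0ca92f6ed")
print(cache)  # [] -- matches "Updating instance_info_cache with network_info: []"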
{{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1502.758359] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1502.758558] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.111s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1503.046324] env[68244]: INFO nova.compute.manager [-] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Took 1.26 seconds to deallocate network for instance. [ 1503.553039] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1503.553354] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1503.553575] env[68244]: DEBUG nova.objects.instance [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'resources' on Instance uuid e2f0d059-6415-4a90-98f7-99fe67e8d81c {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1503.752737] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.752947] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.753133] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.753282] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.753425] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.753558] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1504.086472] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37512220-dc58-4e60-ac96-a5f5c40fa7b0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.093876] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed914e0-9d75-4e1b-a739-555a984d16d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.122983] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8127f9-8f6a-49ce-b795-f75bdce4ca09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.129356] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cd8bf2-e67b-4e88-9270-58f45711d31d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.141783] env[68244]: DEBUG nova.compute.provider_tree [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.644932] env[68244]: DEBUG nova.scheduler.client.report [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1505.151054] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.597s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1505.170513] env[68244]: INFO nova.scheduler.client.report [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance e2f0d059-6415-4a90-98f7-99fe67e8d81c [ 1505.678018] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ae5401a9-ef3b-448b-b126-04dd64541624 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "e2f0d059-6415-4a90-98f7-99fe67e8d81c" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.998s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.558350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.558575] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.060804] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1507.078113] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.078270] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Cleaning up deleted instances {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1507.587672] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] There are 26 instances to clean {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1507.587858] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: e2f0d059-6415-4a90-98f7-99fe67e8d81c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1507.590402] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.590657] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.592012] env[68244]: INFO nova.compute.claims [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.097714] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 92b166d2-bc8b-44ba-adf0-3286285c7611] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1508.603717] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 2d539645-1fd5-4c8d-813b-129677ebb11c] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1508.717984] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14073320-163b-4e05-ad1c-65265ebb9ae6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.724374] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed4bfd9-b84d-43d3-a58c-57d167a35481 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.753971] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f283efc-f401-453c-bb3c-35353ab8c82a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.760917] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f469f2d4-6a88-41b6-afed-67673ce80b65 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.773491] env[68244]: DEBUG nova.compute.provider_tree [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.106999] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 40ff9cea-7b4f-4ed0-a7a5-32fd818ed6b1] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1509.276726] env[68244]: DEBUG nova.scheduler.client.report [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1509.610508] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ff690eea-6e5a-42a3-bf85-1b844425df2a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 
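The 'Inventory has not changed for provider b885cb16-...' record prints the full inventory the node reports. Read simply, the schedulable capacity per resource class is (total - reserved) * allocation_ratio, and the push to Placement is skipped when the inventory matches what was last sent, which is why the line repeats unchanged. A sketch of that reading using the values from the record; it is not the actual scheduler report client.

# Inventory as printed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3
# (trimmed to the fields used here).
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}


def schedulable_capacity(inventory):
    """Capacity the scheduler can hand out: (total - reserved) * allocation_ratio."""
    return {rc: int((rec["total"] - rec["reserved"]) * rec["allocation_ratio"])
            for rc, rec in inventory.items()}


def needs_update(new_inventory, last_sent):
    """Only push to Placement when something actually changed."""
    return new_inventory != last_sent


print(schedulable_capacity(INVENTORY))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
print(needs_update(INVENTORY, INVENTORY))  # False -> "Inventory has not changed"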
1509.781135] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1509.781664] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1510.113284] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: b6e13d36-31e3-4d07-894e-cc540acdaf21] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1510.286713] env[68244]: DEBUG nova.compute.utils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1510.288058] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Allocating IP information in the background. {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1510.288217] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] allocate_for_instance() {{(pid=68244) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.334922] env[68244]: DEBUG nova.policy [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a17d79ef7b14c178c98a60499967c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e3ca107ec07495cb1876bd472e0cd8a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68244) authorize /opt/stack/nova/nova/policy.py:192}} [ 1510.591196] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Successfully created port: 35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.616382] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 8aafc14e-418a-4c43-80b9-54da13550c32] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 
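The instance_claim records just above ('Claim successful on node domain-c8...', then the "compute_resources" lock released after being held 2.190s) reflect a test-and-reserve step: while holding the lock, check that the requested vCPU/RAM/disk fit the node's free resources, then record the usage. A minimal sketch of that pattern using the 1 vCPU / 192 MB / 1 GB m1.nano request and the free figures from the hypervisor resource view earlier in the log; SimpleClaimTracker is illustrative, not Nova's ResourceTracker.

import threading
from dataclasses import dataclass


@dataclass
class NodeResources:
    free_vcpus: int
    free_ram_mb: int
    free_disk_gb: int


class SimpleClaimTracker:
    def __init__(self, node):
        self._node = node
        self._lock = threading.Lock()  # plays the role of the "compute_resources" lock

    def claim(self, vcpus, ram_mb, disk_gb):
        with self._lock:
            n = self._node
            if vcpus > n.free_vcpus or ram_mb > n.free_ram_mb or disk_gb > n.free_disk_gb:
                return False            # claim rejected; the build would be rescheduled
            n.free_vcpus -= vcpus       # reserve while still holding the lock
            n.free_ram_mb -= ram_mb
            n.free_disk_gb -= disk_gb
            return True


tracker = SimpleClaimTracker(NodeResources(free_vcpus=48, free_ram_mb=180890, free_disk_gb=176))
print(tracker.claim(vcpus=1, ram_mb=192, disk_gb=1))  # True -> "Claim successful"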
1510.791359] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1511.119579] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ee819188-5e97-4a5f-80a1-3901dfe65f6e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1511.622911] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 5519079a-d2a5-48c5-921c-199e0fc60aa3] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1511.802794] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Start spawning the instance on the hypervisor. {{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1511.830163] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1511.830442] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.830596] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1511.830777] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.830921] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1511.831078] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1511.831293] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1511.831454] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1511.831620] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1511.831794] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1511.831967] env[68244]: DEBUG nova.virt.hardware [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1511.832922] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0308d6-545e-4b96-af6d-8c74e32ade3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.840835] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278502ce-7fb7-40a4-a722-b9c470ad4b0f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.937235] env[68244]: DEBUG nova.compute.manager [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Received event network-vif-plugged-35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1511.937463] env[68244]: DEBUG oslo_concurrency.lockutils [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1511.937699] env[68244]: DEBUG 
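The nova.virt.hardware records above pick a CPU topology for the 1-vCPU flavor: with limits of 65536 sockets/cores/threads, the only possible topology is sockets=1, cores=1, threads=1. The sketch below enumerates the sockets x cores x threads factorizations of the vCPU count that respect the limits; it illustrates the idea rather than reproducing nova.virt.hardware.

from typing import NamedTuple


class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """All sockets*cores*threads factorizations of vcpus within the given limits."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found


print(possible_topologies(1, 65536, 65536, 65536))
# [Topology(sockets=1, cores=1, threads=1)] -- the single topology reported above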
oslo_concurrency.lockutils [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1511.937825] env[68244]: DEBUG oslo_concurrency.lockutils [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1511.937992] env[68244]: DEBUG nova.compute.manager [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] No waiting events found dispatching network-vif-plugged-35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1511.938165] env[68244]: WARNING nova.compute.manager [req-4cb1a98b-e4d1-4d9c-85f7-8fbf1a62ee9b req-a36d2d1b-f445-4889-86e1-cd341f410e62 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Received unexpected event network-vif-plugged-35f35d13-1bda-48cd-b69b-184a462ee37f for instance with vm_state building and task_state spawning. [ 1512.019117] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Successfully updated port: 35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.126083] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 3ac3a4a2-99c5-4fc7-8301-97cfdb7e6715] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1512.522083] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.522249] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1512.522399] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.629539] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: d4adee86-43f1-4d6f-a4a5-8cce39e1f03e] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11766}} [ 1513.054159] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.132685] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: ae4d8900-3185-4747-ba8d-fe334d9e3237] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1513.172946] env[68244]: DEBUG nova.network.neutron [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.635628] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 0c949d23-d98f-47d2-9f3c-d520df035d55] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1513.675763] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1513.676093] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Instance network_info: |[{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1513.676544] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:f4:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35f35d13-1bda-48cd-b69b-184a462ee37f', 'vif_model': 'vmxnet3'}] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.683999] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1513.684214] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1513.684484] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7674a52c-6cb4-4d25-b1a8-b065fd7a8dab {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.706430] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.706430] env[68244]: value = "task-2781625" [ 1513.706430] env[68244]: _type = "Task" [ 1513.706430] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.713798] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781625, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.962428] env[68244]: DEBUG nova.compute.manager [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Received event network-changed-35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1513.962546] env[68244]: DEBUG nova.compute.manager [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Refreshing instance network info cache due to event network-changed-35f35d13-1bda-48cd-b69b-184a462ee37f. {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1513.962759] env[68244]: DEBUG oslo_concurrency.lockutils [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] Acquiring lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.962902] env[68244]: DEBUG oslo_concurrency.lockutils [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] Acquired lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1513.963076] env[68244]: DEBUG nova.network.neutron [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Refreshing network info cache for port 35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1514.139064] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 256a0329-07b6-4bc2-a574-6e5a108d301a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1514.217469] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781625, 'name': CreateVM_Task, 'duration_secs': 0.300739} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.217595] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.218253] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.218417] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1514.218750] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1514.218994] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-888de2f7-cc66-4bcb-bf72-b421047574c8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.223616] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1514.223616] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52349b0c-7b75-f5c4-50cd-0d4ad06ade42" [ 1514.223616] env[68244]: _type = "Task" [ 1514.223616] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.230925] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52349b0c-7b75-f5c4-50cd-0d4ad06ade42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.639206] env[68244]: DEBUG nova.network.neutron [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updated VIF entry in instance network info cache for port 35f35d13-1bda-48cd-b69b-184a462ee37f. 
{{(pid=68244) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1514.639593] env[68244]: DEBUG nova.network.neutron [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.641444] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 64467948-35bb-4ad7-ac76-bbbd6f66e96f] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1514.733983] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52349b0c-7b75-f5c4-50cd-0d4ad06ade42, 'name': SearchDatastore_Task, 'duration_secs': 0.00942} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.734319] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1514.734554] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.734793] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.734941] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1514.735483] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1514.735483] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f3ee1f2-6e0c-4359-938e-f49001e2e4e5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.743785] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1514.743959] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Folder [datastore2] devstack-image-cache_base created. 
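The 'Creating directory ... devstack-image-cache_base' / 'Folder ... created' / SearchDatastore_Task records above follow an idempotent pattern: make sure the image-cache folder exists, then look for the cached image inside it. Sketched here against the local filesystem purely to show the ordering; the real flow drives datastore tasks instead, and find_cached_image is an illustrative helper.

from pathlib import Path


def find_cached_image(cache_root: Path, image_id: str):
    cache_root.mkdir(parents=True, exist_ok=True)        # MakeDirectory: no-op when present
    candidate = cache_root / image_id / f"{image_id}.vmdk"
    return candidate if candidate.exists() else None     # the SearchDatastore_Task step


print(find_cached_image(Path("/tmp/devstack-image-cache_base"),
                        "9aa0b4d1-af1b-4141-9ca6-95525b722d7e"))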
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1514.744671] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5edea5-ea4a-4248-976d-c429b75fa990 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.749167] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1514.749167] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fdefa0-f502-6c2c-3f5a-4a7ec0f4b159" [ 1514.749167] env[68244]: _type = "Task" [ 1514.749167] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.756219] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fdefa0-f502-6c2c-3f5a-4a7ec0f4b159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.142092] env[68244]: DEBUG oslo_concurrency.lockutils [req-63b3b8b4-4dcc-48b4-9571-ed9dea28e3e3 req-2e8e5eb1-cd07-471e-bcd3-8825c1900665 service nova] Releasing lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.144794] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 7ee2b5ee-58e5-4d31-952c-37a8411c6244] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1515.259303] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fdefa0-f502-6c2c-3f5a-4a7ec0f4b159, 'name': SearchDatastore_Task, 'duration_secs': 0.008876} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.260066] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d3be2b-748f-481f-b352-0725fd546f59 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.264736] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1515.264736] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52025604-76ef-0368-0d75-ba7fce521303" [ 1515.264736] env[68244]: _type = "Task" [ 1515.264736] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.271714] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52025604-76ef-0368-0d75-ba7fce521303, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.647847] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 0597e8ed-2f24-44c7-ac92-06af34d6a4fa] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1515.774950] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52025604-76ef-0368-0d75-ba7fce521303, 'name': SearchDatastore_Task, 'duration_secs': 0.009657} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.775205] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.775487] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1515.775747] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd046818-496a-4474-86ee-7961c5ff2a2e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.781839] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1515.781839] env[68244]: value = "task-2781626" [ 1515.781839] env[68244]: _type = "Task" [ 1515.781839] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.789071] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781626, 'name': CopyVirtualDisk_Task} progress is 0%. 
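Note: the "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplets above are oslo.vmware's task poller at work. A hedged sketch of how the CopyVirtualDisk_Task call could be driven, assuming an already-established oslo_vmware.api.VMwareAPISession named session (the class this log shows logging into vCenter) and a VirtualDiskManager reference disk_mgr; parameter plumbing is simplified.

def copy_cached_vmdk(session, disk_mgr, datacenter, src, dst):
    # Kick off the server-side copy; vCenter hands back a Task managed object.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=datacenter,
                              destName=dst, destDatacenter=datacenter)
    # wait_for_task() polls TaskInfo until it succeeds or raises; that polling
    # is what produces the "progress is N%" lines in this log.
    return session.wait_for_task(task)

# e.g. src = "[datastore2] devstack-image-cache_base/9aa0b4d1-....vmdk"
#      dst = "[datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-....vmdk"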
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.150908] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: a9820dc4-f52e-453c-9acf-a6a0c9a23580] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1516.292180] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.420229} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.292405] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1516.292628] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1516.292896] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01ebec8f-1fd1-4503-be76-34ea01965fa0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.299342] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1516.299342] env[68244]: value = "task-2781627" [ 1516.299342] env[68244]: _type = "Task" [ 1516.299342] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.306064] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.654953] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: f1143201-5ee1-45be-b2b1-4314a26aa10a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1516.809600] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06259} completed successfully. 
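Note: the "Extending root virtual disk to 1048576" figure above is the flavor's 1 GB root disk expressed in KiB, the unit the extend task works in on this path. A tiny illustration of the conversion (helper name is illustrative):

def root_gb_to_kb(root_gb):
    # 1 GiB = 1024 * 1024 KiB; the flavors in this log have root_gb=1.
    return root_gb * 1024 * 1024

assert root_gb_to_kb(1) == 1048576   # matches "Extending root virtual disk to 1048576"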
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.809715] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1516.810931] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4c1a35-575e-4024-84be-fbeadf23c92e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.831561] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1516.832047] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0081d51c-4216-4a06-a7ee-0d334d870aee {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.850189] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1516.850189] env[68244]: value = "task-2781628" [ 1516.850189] env[68244]: _type = "Task" [ 1516.850189] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.857398] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.158009] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 375c4371-3537-4a94-987e-0f6f72a690b8] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1517.359866] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781628, 'name': ReconfigVM_Task, 'duration_secs': 0.281171} completed successfully. 
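Note: the ReconfigVM_Task above attaches the freshly copied VMDK to the VM as a new disk device. A hedged sketch of that device-change pattern, assuming the suds object factory oslo.vmware exposes as session.vim.client.factory; the real driver builds a richer spec (controller lookup, sparse/thin backing, adapter type) than shown here.

def attach_disk(session, vm_ref, vmdk_path, controller_key):
    factory = session.vim.client.factory

    backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path          # e.g. "[datastore2] <uuid>/<uuid>.vmdk"
    backing.diskMode = 'persistent'

    disk = factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = 0
    disk.key = -100                       # negative key => new device

    change = factory.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    session.wait_for_task(task)           # "ReconfigVM_Task ... completed successfully."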
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.360092] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1517.360754] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09fe0bcb-2334-40e2-8e66-ea1861b9a280 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.366978] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1517.366978] env[68244]: value = "task-2781629" [ 1517.366978] env[68244]: _type = "Task" [ 1517.366978] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.374515] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781629, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.661414] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 77ba8e47-10bb-4630-bd89-067f5ad7bad9] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1517.877281] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781629, 'name': Rename_Task, 'duration_secs': 0.143184} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.877549] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1517.877793] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8e9175d-2139-42e0-915e-dd314608c961 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.884267] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1517.884267] env[68244]: value = "task-2781630" [ 1517.884267] env[68244]: _type = "Task" [ 1517.884267] env[68244]: } to complete. 
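Note: after the Rename_Task, the log shows the VM being powered on and then its state being checked (the PropertyCollector.RetrievePropertiesEx call that follows reads the runtime power state). A minimal sketch of the power-on half, under the same session assumption as the earlier sketches:

def power_on(session, vm_ref):
    # PowerOnVM_Task returns a Task; waiting on it yields the
    # "PowerOnVM_Task ... completed successfully" line above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)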
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.891369] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.165320] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: da2d34f8-44fc-4c37-ba42-50e6ecf1a2ab] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1518.394304] env[68244]: DEBUG oslo_vmware.api [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781630, 'name': PowerOnVM_Task, 'duration_secs': 0.452053} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.394620] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1518.394823] env[68244]: INFO nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Took 6.59 seconds to spawn the instance on the hypervisor. [ 1518.395056] env[68244]: DEBUG nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1518.395817] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c251997-53de-4d63-b02f-a605c472a246 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.668996] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 7f3b21f4-fb8f-493b-bf4a-23cf4bd69d47] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1518.913966] env[68244]: INFO nova.compute.manager [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Took 11.35 seconds to build instance. 
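Note: the "Instance has had 0 of 5 cleanup attempts" lines interleaved through this trace come from a periodic task, not from the build request itself. Its shape is a bounded retry; a minimal sketch with hypothetical names, where the limit of 5 mirrors the log:

MAX_CLEANUP_ATTEMPTS = 5   # the "of 5" in the log lines

def run_pending_deletes(instances, cleanup):
    for inst in instances:
        attempts = inst.get('cleanup_attempts', 0)
        if attempts >= MAX_CLEANUP_ATTEMPTS:
            continue                        # stop retrying this instance
        try:
            cleanup(inst)                   # remove leftover local artifacts
        except Exception:
            inst['cleanup_attempts'] = attempts + 1   # retry on a later pass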
[ 1519.172181] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: dfe017bb-d860-4da6-abe5-7e8d7a7dd05a] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1519.416543] env[68244]: DEBUG oslo_concurrency.lockutils [None req-1b24ff1e-55ef-4fbc-bfa4-ca61098bd0d0 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.858s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1519.676018] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: 75bec02f-82f7-4e8d-81da-3c511588be29] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1520.181232] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] [instance: a6b7b165-2d53-4dc5-8b3a-f98b3144a1c1] Instance has had 0 of 5 cleanup attempts {{(pid=68244) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1520.311560] env[68244]: DEBUG nova.compute.manager [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Stashing vm_state: active {{(pid=68244) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1520.829032] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1520.829200] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1521.333956] env[68244]: INFO nova.compute.claims [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1521.840042] env[68244]: INFO nova.compute.resource_tracker [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating resource usage from migration 54320e4c-13e8-48fe-8204-4e47d4f697da [ 1521.878429] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b9ec70-f621-4f5b-b1d5-3a4acfe06ba9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.885821] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-33c149ba-03a3-4aa9-842f-4c15fdd731e1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.916253] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4166d79f-b54e-469e-a0d3-8dbba00eebb5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.922881] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9737980b-c81a-4e21-8145-97f48b4392a5 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.936343] env[68244]: DEBUG nova.compute.provider_tree [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.440079] env[68244]: DEBUG nova.scheduler.client.report [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1522.944799] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.115s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1522.945048] env[68244]: INFO nova.compute.manager [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Migrating [ 1523.459614] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.459932] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.459932] env[68244]: DEBUG nova.network.neutron [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Building network info 
cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1524.134631] env[68244]: DEBUG nova.network.neutron [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.638516] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1526.154935] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e1781-f833-4a8b-b192-aeaab8393695 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.176317] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 0 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1526.682772] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.683111] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67109c61-6250-4aa5-8aab-acc069d75aef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.691059] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1526.691059] env[68244]: value = "task-2781631" [ 1526.691059] env[68244]: _type = "Task" [ 1526.691059] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.699763] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.205212] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781631, 'name': PowerOffVM_Task, 'duration_secs': 0.199288} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.205572] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1527.205851] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 17 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1527.714399] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1527.714689] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1527.714897] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1527.715031] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1527.715181] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1527.715328] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1527.715573] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1527.715757] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1527.715930] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1527.716107] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1527.716294] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1527.721356] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af4ff29c-7f9c-4b2b-954c-38e5773998b8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.737839] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1527.737839] env[68244]: value = "task-2781632" [ 1527.737839] env[68244]: _type = "Task" [ 1527.737839] env[68244]: } to complete. 
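Note: the nova.virt.hardware records above reduce to simple arithmetic: with no flavor or image limits set, every (sockets, cores, threads) factorization of the vCPU count within the 65536 caps is a candidate, and a 1-vCPU flavor only factors as 1x1x1, hence the single possible topology reported. A small illustration, not the driver's code:

def possible_topologies(vcpus, limit=65536):
    out = []
    for sockets in range(1, min(vcpus, limit) + 1):
        if vcpus % sockets:
            continue
        rem = vcpus // sockets
        for cores in range(1, min(rem, limit) + 1):
            if rem % cores:
                continue
            threads = rem // cores
            if threads <= limit:
                out.append((sockets, cores, threads))
    return out

assert possible_topologies(1) == [(1, 1, 1)]   # "Got 1 possible topologies"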
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.745684] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.247794] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781632, 'name': ReconfigVM_Task, 'duration_secs': 0.166155} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.248174] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 33 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1528.754875] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1528.755138] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1528.755266] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1528.755460] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1528.755626] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1528.755772] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1528.755978] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1528.756153] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1528.756319] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1528.756508] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1528.756713] env[68244]: DEBUG nova.virt.hardware [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1528.761988] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1528.762285] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a076286-7464-4399-b36e-ade9a0a79e56 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.780943] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1528.780943] env[68244]: value = "task-2781633" [ 1528.780943] env[68244]: _type = "Task" [ 1528.780943] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.791034] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781633, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.290761] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781633, 'name': ReconfigVM_Task, 'duration_secs': 0.149227} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.291113] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68244) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1529.291784] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d98ddca-3342-4fc1-b227-a2f11681a90c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.312511] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1529.312769] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7e15667-f8bb-4642-9699-168e6a2076ef {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.329661] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1529.329661] env[68244]: value = "task-2781634" [ 1529.329661] env[68244]: _type = "Task" [ 1529.329661] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.336748] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781634, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.839330] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781634, 'name': ReconfigVM_Task, 'duration_secs': 0.259143} completed successfully. 
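Note: the two ReconfigVM_Task calls above are the resize reshaping the disk layout: the existing root disk device ("disk 2000" in the log; 2000 is vSphere's conventional key for the first virtual disk) is detached, then the same VMDK is re-attached with thin provisioning. A hedged sketch of the detach half, under the same factory/session assumptions as the attach sketch earlier; `device` is the VirtualDisk object previously read from the VM's hardware list.

def detach_disk(session, vm_ref, device):
    factory = session.vim.client.factory
    change = factory.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'remove'           # detach the device; with no
    change.device = device                # fileOperation set, the VMDK stays on the datastore
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [change]
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    session.wait_for_task(task)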
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.839607] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15/4a5d6fc8-5426-4854-ae4d-e009243c6a15.vmdk or device None with type thin {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.839890] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 50 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1530.346672] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c191ab-6eb1-4aab-b24c-5c75dc9f3de0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.365352] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1b462a-e351-4f78-a2f0-633ee867e7e7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.382126] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 67 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1530.920105] env[68244]: DEBUG nova.network.neutron [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Port 35f35d13-1bda-48cd-b69b-184a462ee37f binding to destination host cpu-1 is already ACTIVE {{(pid=68244) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1531.943739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1531.944117] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1531.944387] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 
tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1532.976654] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.976890] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1532.977074] env[68244]: DEBUG nova.network.neutron [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1533.653632] env[68244]: DEBUG nova.network.neutron [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.156823] env[68244]: DEBUG oslo_concurrency.lockutils [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1534.680148] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0c3b2212-0058-490a-9e92-1e36a4b8efbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.698667] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdf8949-45aa-4421-9dfe-09e3252fe970 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.705397] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 83 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1535.211711] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.212044] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbf4ec5f-d9d6-46c3-9768-879f230b1716 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.219449] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1535.219449] env[68244]: value = "task-2781635" [ 1535.219449] env[68244]: _type = "Task" [ 1535.219449] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.227017] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781635, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.729569] env[68244]: DEBUG oslo_vmware.api [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781635, 'name': PowerOnVM_Task, 'duration_secs': 0.355425} completed successfully. 
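Note: the network_info blobs logged while the instance cache is refreshed (port 35f35d13-1bda-..., fixed IP 192.168.128.14 on 192.168.128.0/28) are plain nested structures. A small illustrative helper, not Nova code, that pulls the fixed addresses out of one such entry:

def fixed_ips(vif):
    ips = []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            if ip["type"] == "fixed":
                ips.append(ip["address"])
    return ips

vif = {"network": {"subnets": [{"ips": [
    {"address": "192.168.128.14", "type": "fixed"}]}]}}
assert fixed_ips(vif) == ["192.168.128.14"]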
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.729837] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1535.730033] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-74cc1732-72c1-4262-b29c-9b833ca82f14 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance '4a5d6fc8-5426-4854-ae4d-e009243c6a15' progress to 100 {{(pid=68244) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1538.550086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.550533] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.550766] env[68244]: DEBUG nova.compute.manager [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Going to confirm migration 10 {{(pid=68244) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1539.723762] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.724141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquired lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1539.724141] env[68244]: DEBUG nova.network.neutron [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1539.724370] env[68244]: DEBUG nova.objects.instance [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'info_cache' on Instance uuid 4a5d6fc8-5426-4854-ae4d-e009243c6a15 {{(pid=68244) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1540.902375] env[68244]: DEBUG nova.network.neutron [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [{"id": "35f35d13-1bda-48cd-b69b-184a462ee37f", "address": "fa:16:3e:dd:f4:0b", "network": {"id": "de1d2570-2d8c-4a5c-8e99-7807cf5e95a9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-957860387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e3ca107ec07495cb1876bd472e0cd8a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f35d13-1b", "ovs_interfaceid": "35f35d13-1bda-48cd-b69b-184a462ee37f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.405116] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Releasing lock "refresh_cache-4a5d6fc8-5426-4854-ae4d-e009243c6a15" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1541.405350] env[68244]: DEBUG nova.objects.instance [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lazy-loading 'migration_context' on Instance uuid 4a5d6fc8-5426-4854-ae4d-e009243c6a15 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1541.908081] env[68244]: DEBUG nova.objects.base [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Object Instance<4a5d6fc8-5426-4854-ae4d-e009243c6a15> lazy-loaded attributes: info_cache,migration_context {{(pid=68244) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1541.909064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058b8e2d-db85-48e6-b741-2183274cf725 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.929492] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e109ba7a-8d03-4ff6-9b28-5e5f07202a84 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.934539] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1541.934539] env[68244]: value 
= "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f268f3-e8ae-96d5-c61e-4a0bc6cc8175" [ 1541.934539] env[68244]: _type = "Task" [ 1541.934539] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.941691] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f268f3-e8ae-96d5-c61e-4a0bc6cc8175, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.444574] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52f268f3-e8ae-96d5-c61e-4a0bc6cc8175, 'name': SearchDatastore_Task, 'duration_secs': 0.007955} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.444862] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1542.445114] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1542.987392] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ccb434-3537-495a-a614-2d01ffdf6a54 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.994514] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1a0a59-7a74-415f-aab5-998b4808018b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.024117] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1c158-79aa-4811-87ba-7a38d8f857cc {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.031125] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5582939-b737-47f0-ba58-8079216e63e4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.043546] env[68244]: DEBUG nova.compute.provider_tree [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.546671] env[68244]: DEBUG 
nova.scheduler.client.report [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1544.559319] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.114s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1545.112544] env[68244]: INFO nova.scheduler.client.report [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocation for migration 54320e4c-13e8-48fe-8204-4e47d4f697da [ 1545.619152] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.069s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1547.077513] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1547.077852] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1547.078069] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1547.079052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1547.079052] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1547.080840] env[68244]: INFO nova.compute.manager [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Terminating instance [ 1547.585118] env[68244]: DEBUG nova.compute.manager [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1547.585382] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1547.586322] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cef451-34b4-4594-b181-ad7727e32dbf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.594125] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1547.594352] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb15e483-03c2-4539-bdad-fad24e7a1744 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.600373] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1547.600373] env[68244]: value = "task-2781636" [ 1547.600373] env[68244]: _type = "Task" [ 1547.600373] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.608283] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781636, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.110186] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781636, 'name': PowerOffVM_Task, 'duration_secs': 0.197689} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.110529] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1548.110660] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1548.110849] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-caf3c5ca-901c-4879-962f-f908d994d9ec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.177304] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1548.177524] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1548.177683] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleting the datastore file [datastore2] 4a5d6fc8-5426-4854-ae4d-e009243c6a15 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1548.177957] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d200593f-2e75-47f8-acb3-1472a39d7187 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.184465] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for the task: (returnval){ [ 1548.184465] env[68244]: value = "task-2781638" [ 1548.184465] env[68244]: _type = "Task" [ 1548.184465] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.191853] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.694532] env[68244]: DEBUG oslo_vmware.api [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Task: {'id': task-2781638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12884} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.694829] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1548.695010] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1548.695198] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1548.695376] env[68244]: INFO nova.compute.manager [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1548.695613] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1548.695841] env[68244]: DEBUG nova.compute.manager [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1548.695933] env[68244]: DEBUG nova.network.neutron [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1548.974444] env[68244]: DEBUG nova.compute.manager [req-a0d14d4b-e648-4842-8206-a3d922735bd8 req-b8966ef7-3c05-4c43-9826-a821bd050999 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Received event network-vif-deleted-35f35d13-1bda-48cd-b69b-184a462ee37f {{(pid=68244) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1548.974656] env[68244]: INFO nova.compute.manager [req-a0d14d4b-e648-4842-8206-a3d922735bd8 req-b8966ef7-3c05-4c43-9826-a821bd050999 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Neutron deleted interface 35f35d13-1bda-48cd-b69b-184a462ee37f; detaching it from the instance and deleting it from the info cache [ 1548.974836] env[68244]: DEBUG nova.network.neutron [req-a0d14d4b-e648-4842-8206-a3d922735bd8 req-b8966ef7-3c05-4c43-9826-a821bd050999 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.456138] env[68244]: DEBUG nova.network.neutron [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.477442] env[68244]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20641133-b75b-4454-8204-78189ff34875 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.489943] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33c93f0-15e5-49a4-a45b-9dd100936839 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.513930] env[68244]: DEBUG nova.compute.manager [req-a0d14d4b-e648-4842-8206-a3d922735bd8 req-b8966ef7-3c05-4c43-9826-a821bd050999 service nova] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Detach interface failed, port_id=35f35d13-1bda-48cd-b69b-184a462ee37f, reason: Instance 4a5d6fc8-5426-4854-ae4d-e009243c6a15 could not be found. {{(pid=68244) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1549.959560] env[68244]: INFO nova.compute.manager [-] [instance: 4a5d6fc8-5426-4854-ae4d-e009243c6a15] Took 1.26 seconds to deallocate network for instance. 
[ 1550.467297] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1550.467737] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1550.467782] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1550.490047] env[68244]: INFO nova.scheduler.client.report [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Deleted allocations for instance 4a5d6fc8-5426-4854-ae4d-e009243c6a15 [ 1550.997368] env[68244]: DEBUG oslo_concurrency.lockutils [None req-ea07e019-a28c-4431-8390-33a6bc59d297 tempest-DeleteServersTestJSON-1755156045 tempest-DeleteServersTestJSON-1755156045-project-member] Lock "4a5d6fc8-5426-4854-ae4d-e009243c6a15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.919s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.332086] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "7ef698d0-b436-43a0-85d4-210006c37122" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.332370] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.509386] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.509614] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 
tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.834680] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1558.011708] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Starting instance... {{(pid=68244) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1558.357092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1558.357374] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1558.358888] env[68244]: INFO nova.compute.claims [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1558.530350] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.402050] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e387c3d6-e0ae-4d1c-a582-15c1cf7b1596 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.409272] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cf3ad8-22bb-4f32-bd87-9bc1c0731f69 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.439013] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4456585e-cf0a-4b8d-b8d4-cd5f0479a0a2 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.445590] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-95470ce3-7fa7-42c3-9cee-9db54027f962 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.458403] env[68244]: DEBUG nova.compute.provider_tree [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.962132] env[68244]: DEBUG nova.scheduler.client.report [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1560.467617] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1560.468176] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1560.471476] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.941s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1560.473550] env[68244]: INFO nova.compute.claims [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.976801] env[68244]: DEBUG nova.compute.utils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1560.980137] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1561.481348] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1561.521731] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a349107-3360-4458-9783-ac949de417a4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.529833] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6f4068-d8e5-49a6-8ed7-6b947c7edf28 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.560633] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f108af2d-2c6f-45e4-bf89-06b174cc0306 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.567551] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b67c8e-c83d-43dc-9d4d-684a58257061 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.580346] env[68244]: DEBUG nova.compute.provider_tree [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.083248] env[68244]: DEBUG nova.scheduler.client.report [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.493530] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1562.520363] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1562.520622] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.520776] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1562.520961] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.521121] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1562.521291] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1562.521498] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1562.521656] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1562.521820] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 
tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1562.521981] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1562.522168] env[68244]: DEBUG nova.virt.hardware [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1562.523035] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4056b016-df3d-41d1-8475-9123312a63d6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.530962] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0aa9458-d9e0-46d7-879d-70da4e923c93 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.543617] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1562.549085] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Creating folder: Project (4ed38cfed9ae4db7ad00bc4038305afe). Parent ref: group-v558876. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.549335] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-838db09f-6652-469a-8d13-73a99a966db3 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.560063] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Created folder: Project (4ed38cfed9ae4db7ad00bc4038305afe) in parent group-v558876. [ 1562.560263] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Creating folder: Instances. Parent ref: group-v559218. {{(pid=68244) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1562.560468] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af3c33f5-ee01-4a41-be4c-05472e2123d4 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.569550] env[68244]: INFO nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Created folder: Instances in parent group-v559218. 
[ 1562.569760] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1562.569934] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1562.570132] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2d20e47-70f3-4915-8e58-bc016d92974a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.585704] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1562.585704] env[68244]: value = "task-2781642" [ 1562.585704] env[68244]: _type = "Task" [ 1562.585704] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.588739] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.589203] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Start building networks asynchronously for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1562.595580] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781642, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.097905] env[68244]: DEBUG nova.compute.utils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Using /dev/sd instead of None {{(pid=68244) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1563.099143] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781642, 'name': CreateVM_Task, 'duration_secs': 0.245515} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.099416] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Not allocating networking since 'none' was specified. 
{{(pid=68244) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1563.099544] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1563.100098] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.100258] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1563.100561] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1563.100791] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f07c094-aea1-41f3-8527-c7935be27c39 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.105122] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1563.105122] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5282e6fe-d20f-e516-6e59-31d654f6b9b2" [ 1563.105122] env[68244]: _type = "Task" [ 1563.105122] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.113476] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5282e6fe-d20f-e516-6e59-31d654f6b9b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.601043] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Start building block device mappings for instance. {{(pid=68244) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1563.618696] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5282e6fe-d20f-e516-6e59-31d654f6b9b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010438} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.619141] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1563.619501] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1563.619864] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.620132] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1563.620425] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.620793] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22c01f86-7c1d-4187-96e9-6052c315799d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.629607] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.629867] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1563.630902] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc83d740-3d60-42b6-927f-f0d3eed93bb9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.637022] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1563.637022] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ad9069-fae3-8e3a-6ef7-2be669370d36" [ 1563.637022] env[68244]: _type = "Task" [ 1563.637022] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.645613] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ad9069-fae3-8e3a-6ef7-2be669370d36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.148537] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52ad9069-fae3-8e3a-6ef7-2be669370d36, 'name': SearchDatastore_Task, 'duration_secs': 0.008401} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.149321] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf58e33f-a771-4be4-9d74-f8bc282857e6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.155308] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1564.155308] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fc28dd-1a61-fb05-b26f-ea3c45c6774c" [ 1564.155308] env[68244]: _type = "Task" [ 1564.155308] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.163313] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fc28dd-1a61-fb05-b26f-ea3c45c6774c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.609843] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Start spawning the instance on the hypervisor. 
{{(pid=68244) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1564.630474] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1564.630721] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.630878] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1564.631072] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.631222] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1564.631370] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1564.631576] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1564.631735] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1564.631909] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 
tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1564.632129] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1564.632312] env[68244]: DEBUG nova.virt.hardware [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1564.633163] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599ff26a-6d9d-4914-ad64-de82dfbd7b5a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.640896] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569522c9-e4fc-43b4-8fb1-a231347ae2fe {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.653811] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1564.659321] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1564.659891] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1564.662620] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2472ac53-70c4-406f-b1e7-a8118cc66d19 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.679142] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52fc28dd-1a61-fb05-b26f-ea3c45c6774c, 'name': SearchDatastore_Task, 'duration_secs': 0.009455} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.680472] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1564.680843] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ef698d0-b436-43a0-85d4-210006c37122/7ef698d0-b436-43a0-85d4-210006c37122.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.681161] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1564.681161] env[68244]: value = "task-2781643" [ 1564.681161] env[68244]: _type = "Task" [ 1564.681161] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.681393] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f838da8-70dd-4df9-b901-28ed19499536 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.690324] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781643, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.691425] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1564.691425] env[68244]: value = "task-2781644" [ 1564.691425] env[68244]: _type = "Task" [ 1564.691425] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.699506] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.193552] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781643, 'name': CreateVM_Task, 'duration_secs': 0.325637} completed successfully. 
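Annotation: each "Waiting for the task" / "progress is N%" / "completed successfully" triple above is the driver submitting a vCenter task and polling it until it finishes. The sketch below shows only that polling pattern; it is not the oslo.vmware implementation, and get_task_info plus the canned state sequence are stand-ins.

# Simplified poll loop for an asynchronous task.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it completes; return its result or raise."""
    while True:
        info = get_task_info()          # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)

# Usage with a canned sequence standing in for CopyVirtualDisk_Task polling:
states = iter([{"state": "running", "progress": 0},
               {"state": "success", "result": "task-2781644"}])
print(wait_for_task(lambda: next(states), poll_interval=0))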
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.196221] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1565.196619] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.196777] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1565.197092] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1565.197620] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b53fbc-1ed1-4bcf-b9c0-096f160fec8e {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.201804] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781644, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.42726} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.202355] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 7ef698d0-b436-43a0-85d4-210006c37122/7ef698d0-b436-43a0-85d4-210006c37122.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1565.202555] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1565.202791] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a596c07-3283-41e6-886c-2a8118743eec {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.205248] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1565.205248] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a2189-0403-bbae-2696-cd1f4762b9c0" [ 1565.205248] env[68244]: _type = "Task" [ 1565.205248] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.209557] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1565.209557] env[68244]: value = "task-2781645" [ 1565.209557] env[68244]: _type = "Task" [ 1565.209557] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.212445] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a2189-0403-bbae-2696-cd1f4762b9c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.219148] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781645, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.715840] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]522a2189-0403-bbae-2696-cd1f4762b9c0, 'name': SearchDatastore_Task, 'duration_secs': 0.008598} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.718850] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1565.719094] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1565.719330] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.719475] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1565.719648] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1565.719900] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-167dd0cd-8e9e-4d1b-ace6-28080c774417 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.725906] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781645, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067664} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.726217] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1565.726941] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3950de2f-e5c4-46ca-b9d8-05f3afe07310 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.729868] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1565.730051] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1565.731017] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c93dee4f-b9da-42bd-8667-5285220e1a2d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.747457] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7ef698d0-b436-43a0-85d4-210006c37122/7ef698d0-b436-43a0-85d4-210006c37122.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1565.747950] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-318dfc67-7b90-48c8-b3ef-fa907a790b23 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.762721] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1565.762721] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227acda-cf10-55a6-3d50-6e6f841b1312" [ 1565.762721] env[68244]: _type = "Task" [ 1565.762721] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.767440] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1565.767440] env[68244]: value = "task-2781646" [ 1565.767440] env[68244]: _type = "Task" [ 1565.767440] env[68244]: } to complete. 
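Annotation: the "Extending root virtual disk to 1048576" / "Extended root virtual disk" pair above is the flavor's root_gb expressed in KB (an assumption based on the logged value): m1.nano has root_gb=1, so the copied sparse image is grown to 1 GB. The arithmetic, for reference:

root_gb = 1                            # m1.nano
requested_size_kb = root_gb * 1024 * 1024
print(requested_size_kb)               # 1048576, the value in the log entries above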
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.773252] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5227acda-cf10-55a6-3d50-6e6f841b1312, 'name': SearchDatastore_Task, 'duration_secs': 0.008087} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.774316] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d198041a-5aff-4eb4-a364-1863e865a687 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.778992] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781646, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.781730] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1565.781730] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292372a-629a-9c0b-647e-34346db87dcc" [ 1565.781730] env[68244]: _type = "Task" [ 1565.781730] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.789109] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292372a-629a-9c0b-647e-34346db87dcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.277276] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781646, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.290835] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]5292372a-629a-9c0b-647e-34346db87dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.017417} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.291096] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1566.291363] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1566.291637] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d8ca2cb-168a-4522-9b1e-fdb56049e380 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.297840] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1566.297840] env[68244]: value = "task-2781647" [ 1566.297840] env[68244]: _type = "Task" [ 1566.297840] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.305117] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.778305] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781646, 'name': ReconfigVM_Task, 'duration_secs': 0.514035} completed successfully. 
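Annotation: the Acquiring/Acquired/Releasing lock messages on "[datastore2] devstack-image-cache_base/..." above serialize work on one cached image, so two builds of the same image cannot race while searching the datastore and copying the cached VMDK. A sketch of that per-image critical section with oslo.concurrency follows; the lock-name format and the helper are illustrative, not the vmops code, and the snippet assumes the oslo.concurrency package is installed.

# Per-image critical section around the image cache.
from oslo_concurrency import lockutils

def with_image_cache(datastore, image_id, fn):
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(lock_name):
        return fn()

# Example: the protected section would search the datastore and, on a hit,
# copy the cached VMDK into the instance directory.
with_image_cache("datastore2", "9aa0b4d1-af1b-4141-9ca6-95525b722d7e",
                 lambda: print("cache hit: copy cached VMDK for the instance"))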
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.778662] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7ef698d0-b436-43a0-85d4-210006c37122/7ef698d0-b436-43a0-85d4-210006c37122.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1566.779196] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6fb96db-4ea6-4b76-8abb-1a1e4cf10e46 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.785413] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1566.785413] env[68244]: value = "task-2781648" [ 1566.785413] env[68244]: _type = "Task" [ 1566.785413] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.793103] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781648, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.805238] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781647, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434973} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.805463] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1566.805667] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1566.805903] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54bfbd99-8fa9-4455-b1b6-9c1acd50cb1f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.812626] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1566.812626] env[68244]: value = "task-2781649" [ 1566.812626] env[68244]: _type = "Task" [ 1566.812626] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.819929] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781649, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.295762] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781648, 'name': Rename_Task, 'duration_secs': 0.143614} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.296102] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1567.296366] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f4aa35-ab77-4aab-963c-c06ee4f6f48f {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.302653] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1567.302653] env[68244]: value = "task-2781650" [ 1567.302653] env[68244]: _type = "Task" [ 1567.302653] env[68244]: } to complete. 
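Annotation: interleaved above are two builds going through the same chain of vCenter tasks: CreateVM_Task (a shell VM, "Instance VIF info []"), CopyVirtualDisk_Task from the image cache, ExtendVirtualDisk_Task to the flavor size, ReconfigVM_Task to attach the copied disk, Rename_Task, and finally PowerOnVM_Task. A compact dry-run sketch of that ordering follows; the function and its arguments are illustrative, not the vmops API.

# Each step stands in for a vCenter task the driver waits on before the next.
def spawn_flow(instance_uuid, image_id, root_gb, run_task):
    instance_vmdk = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)
    cached_vmdk = ("[datastore2] devstack-image-cache_base/%s/%s.vmdk"
                   % (image_id, image_id))
    run_task("CreateVM_Task", instance_uuid)                   # bare VM, no disk yet
    run_task("CopyVirtualDisk_Task", cached_vmdk, instance_vmdk)
    run_task("ExtendVirtualDisk_Task", instance_vmdk, root_gb * 1024 * 1024)
    run_task("ReconfigVM_Task", instance_uuid, instance_vmdk)  # attach the disk
    run_task("Rename_Task", instance_uuid)
    run_task("PowerOnVM_Task", instance_uuid)

# Dry run that just prints the ordering seen in the log:
spawn_flow("7ef698d0-b436-43a0-85d4-210006c37122",
           "9aa0b4d1-af1b-4141-9ca6-95525b722d7e", 1,
           lambda name, *args: print(name, *args))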
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.309738] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781650, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.319621] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781649, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065157} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.319857] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1567.320726] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a787a72b-97a7-4e92-9dbb-3e1ecd1779ae {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.339863] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1567.340121] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caa2471d-7fa0-4137-95f2-bf59824c786d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.359821] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1567.359821] env[68244]: value = "task-2781651" [ 1567.359821] env[68244]: _type = "Task" [ 1567.359821] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.367270] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781651, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.683860] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684048] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684215] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684363] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684552] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684711] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.684891] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.685057] env[68244]: DEBUG nova.compute.manager [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68244) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1567.685212] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.812436] env[68244]: DEBUG oslo_vmware.api [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781650, 'name': PowerOnVM_Task, 'duration_secs': 0.454835} completed successfully. 
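Annotation: the run of "Running periodic task ComputeManager._*" lines is the periodic-task runner invoking every registered task on its tick; individual tasks may still bail out on configuration, as _reclaim_queued_deletes does when reclaim_instance_interval <= 0. A self-contained sketch of that registration-and-run pattern follows; it is not the oslo_service implementation, and the CONF dict is a stand-in.

# Registered tasks all run on a tick; a task may skip itself based on config.
PERIODIC_TASKS = []
CONF = {"reclaim_instance_interval": 0}     # illustrative config value

def periodic_task(fn):
    PERIODIC_TASKS.append(fn)
    return fn

@periodic_task
def poll_unconfirmed_resizes():
    print("Running periodic task _poll_unconfirmed_resizes")

@periodic_task
def reclaim_queued_deletes():
    if CONF["reclaim_instance_interval"] <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    print("reclaiming queued deletes")

def run_periodic_tasks():
    for fn in PERIODIC_TASKS:
        fn()

run_periodic_tasks()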
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.812831] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1567.812980] env[68244]: INFO nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Took 5.32 seconds to spawn the instance on the hypervisor. [ 1567.813103] env[68244]: DEBUG nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1567.813863] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d6217b-dac4-4765-a2d5-92dba8bd5959 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.868156] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781651, 'name': ReconfigVM_Task, 'duration_secs': 0.281059} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.868638] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1567.869215] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24f76e1b-889c-4dae-ad0f-09cc2f9bd3c7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.875358] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1567.875358] env[68244]: value = "task-2781652" [ 1567.875358] env[68244]: _type = "Task" [ 1567.875358] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.882364] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781652, 'name': Rename_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.187939] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1568.188232] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1568.188410] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1568.188566] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68244) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1568.189504] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abce325-18a8-46a9-989f-4d8180285ae9 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.198064] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a4110a-834a-46a9-ba3c-32590541af7b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.212884] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15b92aa-6d85-4ba8-bbbe-11e550951e9b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.219157] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfb8993-62e1-419a-ad2c-5fef443ca0c0 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.248125] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181080MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68244) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1568.248256] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1568.248459] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68244) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1568.327939] env[68244]: INFO nova.compute.manager [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Took 9.99 seconds to build instance. [ 1568.385156] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781652, 'name': Rename_Task, 'duration_secs': 0.132137} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.385421] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1568.385648] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49201140-3c65-466e-895f-13dcba7cd784 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.391225] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1568.391225] env[68244]: value = "task-2781653" [ 1568.391225] env[68244]: _type = "Task" [ 1568.391225] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.398424] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781653, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.829774] env[68244]: DEBUG oslo_concurrency.lockutils [None req-50fc217a-dae8-46b4-9207-aeb17bb9f341 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.497s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1568.901206] env[68244]: DEBUG oslo_vmware.api [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781653, 'name': PowerOnVM_Task, 'duration_secs': 0.414004} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.901448] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1568.901609] env[68244]: INFO nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Took 4.29 seconds to spawn the instance on the hypervisor. [ 1568.901788] env[68244]: DEBUG nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1568.902547] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c04b47f-2779-4deb-8d9a-7f5fb9fd35de {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.270550] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 7ef698d0-b436-43a0-85d4-210006c37122 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.270710] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Instance 9ec3f40a-f15c-49de-bb72-89597f4550f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68244) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1569.270889] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1569.271044] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68244) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1569.306046] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f996ac65-7adb-48cd-b759-233b9ec08197 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.312369] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4e7d63-7c50-4df6-ae09-892996d65dde {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.342233] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6074797c-3e07-4199-9d87-e7a76771e811 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.348838] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28123f2-f02a-4eea-9448-7d3b93d99cf7 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.361566] env[68244]: DEBUG nova.compute.provider_tree [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.416482] env[68244]: INFO nova.compute.manager [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Took 10.90 seconds to build instance. 
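Annotation: the final resource view follows from the two tracked m1.nano instances plus reserved host memory: 512 MB reserved + 2 x 192 MB = 896 MB used RAM, 2 x 1 GB root disks = 2 GB used disk, and 2 of 48 vCPUs used. The inventory reported to placement just below then yields schedulable capacity as (total - reserved) * allocation_ratio, e.g. 48 x 4.0 = 192 VCPUs. The reproduction below takes the reserved value of 512 MB from the MEMORY_MB inventory entry that follows, and the capacity formula is the standard placement calculation, stated here as an assumption.

# Usage as tracked by the resource tracker for the two m1.nano instances.
instances = [
    {"uuid": "7ef698d0-b436-43a0-85d4-210006c37122", "MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1},
    {"uuid": "9ec3f40a-f15c-49de-bb72-89597f4550f5", "MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1},
]
reserved_host_memory_mb = 512

used_ram_mb = reserved_host_memory_mb + sum(i["MEMORY_MB"] for i in instances)
used_disk_gb = sum(i["DISK_GB"] for i in instances)
used_vcpus = sum(i["VCPU"] for i in instances)
print(used_ram_mb, used_disk_gb, used_vcpus)       # 896 2 2

# Schedulable capacity per resource class: (total - reserved) * allocation_ratio.
def capacity(total, reserved, allocation_ratio):
    return int((total - reserved) * allocation_ratio)

print(capacity(48, 0, 4.0))          # 192 VCPUs
print(capacity(196590, 512, 1.0))    # 196078 MB of RAM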
[ 1569.865029] env[68244]: DEBUG nova.scheduler.client.report [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1569.918689] env[68244]: DEBUG oslo_concurrency.lockutils [None req-5992eec7-d8f7-4ed1-9441-2df3c78cfba9 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.409s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1570.350018] env[68244]: INFO nova.compute.manager [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Rebuilding instance [ 1570.368826] env[68244]: DEBUG nova.compute.resource_tracker [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68244) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1570.369031] env[68244]: DEBUG oslo_concurrency.lockutils [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1570.387147] env[68244]: DEBUG nova.compute.manager [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1570.388044] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7324fea-2f1b-4cd1-900f-751130e97cf1 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.400891] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.401365] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dcd724e-832f-4e57-9051-5504fd5ad411 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.408617] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1571.408617] env[68244]: value = 
"task-2781654" [ 1571.408617] env[68244]: _type = "Task" [ 1571.408617] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.417068] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781654, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.919045] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781654, 'name': PowerOffVM_Task, 'duration_secs': 0.116189} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.919331] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1571.919555] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1571.920321] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d85766-f847-4367-8455-5d98e14f1bf6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.926785] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1571.926956] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d53775af-18a4-442d-942b-384275891c51 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.949891] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1571.950138] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1571.950280] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleting the datastore file [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5 
{{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1571.950520] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d61c9e40-62cd-488d-ac2c-d8159cd89d5b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.957553] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1571.957553] env[68244]: value = "task-2781656" [ 1571.957553] env[68244]: _type = "Task" [ 1571.957553] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.964538] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.467409] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126865} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.467810] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.467849] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1572.468029] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1573.498017] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:19:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:18:49Z,direct_url=,disk_format='vmdk',id=9aa0b4d1-af1b-4141-9ca6-95525b722d7e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a5e968a8ae42464696bc8ffe1ee86197',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:18:49Z,virtual_size=,visibility=), allow threads: False {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1573.498306] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.498445] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image limits 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1573.498626] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Flavor pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.498775] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Image pref 0:0:0 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1573.498922] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68244) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1573.499147] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1573.499308] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1573.499472] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Got 1 possible topologies {{(pid=68244) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1573.499634] env[68244]: DEBUG nova.virt.hardware [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1573.499805] env[68244]: DEBUG nova.virt.hardware [None 
req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68244) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1573.500699] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d3bd03-2111-4321-bc90-d2491021be09 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.508555] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be5ac02-dc71-4f8d-8f96-3f7a4bde1b47 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.521609] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance VIF info [] {{(pid=68244) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1573.527152] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1573.527370] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Creating VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1573.527561] env[68244]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3bf317a-a414-4241-96da-0e28ca03340b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.544717] env[68244]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1573.544717] env[68244]: value = "task-2781657" [ 1573.544717] env[68244]: _type = "Task" [ 1573.544717] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.551481] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781657, 'name': CreateVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.054457] env[68244]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781657, 'name': CreateVM_Task, 'duration_secs': 0.235375} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.054645] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Created VM on the ESX host {{(pid=68244) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1574.055099] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.055271] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1574.055601] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1574.055845] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd1dc75-96ce-4cdc-a65a-0295d2727af8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.060104] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1574.060104] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52805019-b6ad-fda6-2dea-b5a49e3c86b1" [ 1574.060104] env[68244]: _type = "Task" [ 1574.060104] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.067145] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52805019-b6ad-fda6-2dea-b5a49e3c86b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.570526] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52805019-b6ad-fda6-2dea-b5a49e3c86b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.570897] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1574.571104] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Processing image 9aa0b4d1-af1b-4141-9ca6-95525b722d7e {{(pid=68244) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1574.571475] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.571658] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1574.571873] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1574.572183] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-352527b8-fc1b-4d2c-89a9-b6dffa128c4c {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.579741] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68244) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1574.579904] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68244) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1574.580584] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dac7423-aba4-4a47-b2be-058ca9a7c505 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.586312] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1574.586312] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b7929f-cabb-be38-6006-b0a525e475f2" [ 1574.586312] env[68244]: _type = "Task" [ 1574.586312] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.593098] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b7929f-cabb-be38-6006-b0a525e475f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.097677] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]52b7929f-cabb-be38-6006-b0a525e475f2, 'name': SearchDatastore_Task, 'duration_secs': 0.007906} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.098500] env[68244]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05269815-4164-4043-bb02-fea08522eba8 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.103541] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1575.103541] env[68244]: value = "session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d2483-c4e4-e970-033c-356621d0243f" [ 1575.103541] env[68244]: _type = "Task" [ 1575.103541] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.110756] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d2483-c4e4-e970-033c-356621d0243f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.613821] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': session[52a2e6b9-283a-6598-51d7-1ccb4786a6d3]525d2483-c4e4-e970-033c-356621d0243f, 'name': SearchDatastore_Task, 'duration_secs': 0.008819} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.614199] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1575.614357] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1575.614633] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79501320-6726-4bbf-b930-126ab0547676 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.623874] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1575.623874] env[68244]: value = "task-2781658" [ 1575.623874] env[68244]: _type = "Task" [ 1575.623874] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.630707] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781658, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.134587] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428006} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.134868] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9aa0b4d1-af1b-4141-9ca6-95525b722d7e/9aa0b4d1-af1b-4141-9ca6-95525b722d7e.vmdk to [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk {{(pid=68244) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1576.135124] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Extending root virtual disk to 1048576 {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1576.135377] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ca721a0-e66c-446a-a3d1-98c918d7cc6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.141771] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1576.141771] env[68244]: value = "task-2781659" [ 1576.141771] env[68244]: _type = "Task" [ 1576.141771] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.149013] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781659, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.651401] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064494} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.651677] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Extended root virtual disk {{(pid=68244) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1576.652423] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ddfda3-6a38-41e6-9d60-ef47b0acf72a {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.671828] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.672061] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-360f36d5-7570-497e-93a8-369f4d44dbff {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.690714] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1576.690714] env[68244]: value = "task-2781660" [ 1576.690714] env[68244]: _type = "Task" [ 1576.690714] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.698491] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.200812] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781660, 'name': ReconfigVM_Task, 'duration_secs': 0.311579} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.201122] env[68244]: DEBUG nova.virt.vmwareapi.volumeops [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5/9ec3f40a-f15c-49de-bb72-89597f4550f5.vmdk or device None with type sparse {{(pid=68244) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.201762] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dcc881e-c2fd-43bc-9ae4-366cacdacb3b {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.207640] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1577.207640] env[68244]: value = "task-2781661" [ 1577.207640] env[68244]: _type = "Task" [ 1577.207640] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.214861] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781661, 'name': Rename_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.717926] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781661, 'name': Rename_Task, 'duration_secs': 0.138886} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.718316] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powering on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1577.718478] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bc37daf-dcfc-4d80-ac63-6ed832daf422 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.724160] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1577.724160] env[68244]: value = "task-2781662" [ 1577.724160] env[68244]: _type = "Task" [ 1577.724160] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.731434] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781662, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.233780] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781662, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.733663] env[68244]: DEBUG oslo_vmware.api [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781662, 'name': PowerOnVM_Task, 'duration_secs': 0.691737} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.734070] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powered on the VM {{(pid=68244) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.734118] env[68244]: DEBUG nova.compute.manager [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Checking state {{(pid=68244) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1578.734818] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85714e69-c85a-4011-bc63-9baea90db723 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.758236] env[68244]: DEBUG oslo_service.periodic_task [None req-810d81ae-b152-498a-8c02-d94085dfda52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68244) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1579.250835] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1579.251113] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1579.251291] env[68244]: DEBUG nova.objects.instance [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68244) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1580.171770] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 
tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1580.172132] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1580.172202] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "9ec3f40a-f15c-49de-bb72-89597f4550f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1580.172388] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1580.172558] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.174689] env[68244]: INFO nova.compute.manager [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Terminating instance [ 1580.259163] env[68244]: DEBUG oslo_concurrency.lockutils [None req-735c4603-cfbe-4c33-9cc5-7785e994df7e tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.679416] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "refresh_cache-9ec3f40a-f15c-49de-bb72-89597f4550f5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.679716] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "refresh_cache-9ec3f40a-f15c-49de-bb72-89597f4550f5" {{(pid=68244) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1580.679796] env[68244]: DEBUG nova.network.neutron [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.286517] env[68244]: DEBUG nova.network.neutron [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1581.339883] env[68244]: DEBUG nova.network.neutron [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.844016] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "refresh_cache-9ec3f40a-f15c-49de-bb72-89597f4550f5" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1581.844016] env[68244]: DEBUG nova.compute.manager [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Start destroying the instance on the hypervisor. {{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1581.844016] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1581.844634] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2c4eac-e224-4168-a072-ae0b74e19fcf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.852158] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1581.852389] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbf2af5e-a317-40f4-87d9-7df25b66b635 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.857994] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1581.857994] env[68244]: value = "task-2781663" [ 1581.857994] env[68244]: _type = "Task" [ 1581.857994] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.866071] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781663, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.367822] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781663, 'name': PowerOffVM_Task, 'duration_secs': 0.176471} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.368198] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1582.368291] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1582.368528] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c992f453-d63c-4398-8597-ae0da54c47fa {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.390674] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1582.390874] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1582.391072] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleting the datastore file [datastore2] 9ec3f40a-f15c-49de-bb72-89597f4550f5 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1582.391313] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cbed301-43c6-45e5-bcf8-181d891d9abf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.397270] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1582.397270] env[68244]: value = "task-2781665" [ 1582.397270] env[68244]: _type = "Task" [ 1582.397270] env[68244]: } to complete. 
{{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.404452] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.907514] env[68244]: DEBUG oslo_vmware.api [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095061} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.907768] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1582.907953] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1582.908151] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1582.908332] env[68244]: INFO nova.compute.manager [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Took 1.06 seconds to destroy the instance on the hypervisor. [ 1582.908573] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1582.908763] env[68244]: DEBUG nova.compute.manager [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1582.908857] env[68244]: DEBUG nova.network.neutron [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1582.923899] env[68244]: DEBUG nova.network.neutron [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Instance cache missing network info. 
{{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1583.426721] env[68244]: DEBUG nova.network.neutron [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.929949] env[68244]: INFO nova.compute.manager [-] [instance: 9ec3f40a-f15c-49de-bb72-89597f4550f5] Took 1.02 seconds to deallocate network for instance. [ 1584.436811] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1584.437200] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1584.437444] env[68244]: DEBUG nova.objects.instance [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lazy-loading 'resources' on Instance uuid 9ec3f40a-f15c-49de-bb72-89597f4550f5 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.982156] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd992ae-fcfd-46b5-927e-5595b54f6022 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.989866] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2e083c-996e-4bed-8dd9-7bab6e386310 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.019821] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb085d6-e022-4e72-8185-30f9204d55bf {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.027537] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc4bf23-8ac3-41e6-9642-c7dde742db90 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.040897] env[68244]: DEBUG nova.compute.provider_tree [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.544101] env[68244]: DEBUG nova.scheduler.client.report [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1586.049401] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.069417] env[68244]: INFO nova.scheduler.client.report [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleted allocations for instance 9ec3f40a-f15c-49de-bb72-89597f4550f5 [ 1586.579054] env[68244]: DEBUG oslo_concurrency.lockutils [None req-cfa84886-5b04-4a8c-aa2c-f88b5deeae31 tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "9ec3f40a-f15c-49de-bb72-89597f4550f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.407s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1587.695590] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "7ef698d0-b436-43a0-85d4-210006c37122" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1587.695983] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1587.696081] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "7ef698d0-b436-43a0-85d4-210006c37122-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1587.696305] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1587.696478] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1587.698826] env[68244]: INFO nova.compute.manager [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Terminating instance [ 1588.203020] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "refresh_cache-7ef698d0-b436-43a0-85d4-210006c37122" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.203236] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquired lock "refresh_cache-7ef698d0-b436-43a0-85d4-210006c37122" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1588.203398] env[68244]: DEBUG nova.network.neutron [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Building network info cache for instance {{(pid=68244) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1588.724735] env[68244]: DEBUG nova.network.neutron [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1588.776069] env[68244]: DEBUG nova.network.neutron [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.279289] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Releasing lock "refresh_cache-7ef698d0-b436-43a0-85d4-210006c37122" {{(pid=68244) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1589.279726] env[68244]: DEBUG nova.compute.manager [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Start destroying the instance on the hypervisor. 
{{(pid=68244) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1589.279916] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Destroying instance {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1589.280891] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7447b7-8878-484b-a05a-76e4c5fa2a64 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.289662] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Powering off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1589.289892] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c0084a1-997d-4281-a862-50cd9492ed49 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.296297] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1589.296297] env[68244]: value = "task-2781666" [ 1589.296297] env[68244]: _type = "Task" [ 1589.296297] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.303976] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.806348] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781666, 'name': PowerOffVM_Task, 'duration_secs': 0.113693} completed successfully. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.806769] env[68244]: DEBUG nova.virt.vmwareapi.vm_util [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Powered off the VM {{(pid=68244) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1589.806769] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Unregistering the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1589.807402] env[68244]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ad7786a-b235-40b6-83f1-9fd16e798c6d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.833167] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Unregistered the VM {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1589.833392] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Deleting contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1589.833572] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleting the datastore file [datastore2] 7ef698d0-b436-43a0-85d4-210006c37122 {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1589.833805] env[68244]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df092665-0737-4787-8a22-bc70e4f15c87 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.840456] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for the task: (returnval){ [ 1589.840456] env[68244]: value = "task-2781668" [ 1589.840456] env[68244]: _type = "Task" [ 1589.840456] env[68244]: } to complete. {{(pid=68244) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.847265] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.350439] env[68244]: DEBUG oslo_vmware.api [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Task: {'id': task-2781668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080039} completed successfully. {{(pid=68244) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.350700] env[68244]: DEBUG nova.virt.vmwareapi.ds_util [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleted the datastore file {{(pid=68244) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1590.350881] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Deleted contents of the VM from datastore datastore2 {{(pid=68244) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1590.351071] env[68244]: DEBUG nova.virt.vmwareapi.vmops [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Instance destroyed {{(pid=68244) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1590.351248] env[68244]: INFO nova.compute.manager [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1590.351483] env[68244]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68244) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1590.351674] env[68244]: DEBUG nova.compute.manager [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Deallocating network for instance {{(pid=68244) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1590.351782] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] deallocate_for_instance() {{(pid=68244) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1590.367927] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Instance cache missing network info. {{(pid=68244) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.870296] env[68244]: DEBUG nova.network.neutron [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Updating instance_info_cache with network_info: [] {{(pid=68244) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.372906] env[68244]: INFO nova.compute.manager [-] [instance: 7ef698d0-b436-43a0-85d4-210006c37122] Took 1.02 seconds to deallocate network for instance. 
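The spawn and teardown steps recorded above are all driven by oslo.vmware's invoke-then-poll pattern: each "Invoking FileManager.DeleteDatastoreFile_Task ..." (or CreateVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) line is an invoke_api() call that returns a vCenter task reference, and each "Waiting for the task" / "progress is N%" pair is wait_for_task() polling that task (api.py:397/434) until it finishes. The following is a minimal sketch of that pattern outside Nova, assuming a reachable vCenter; the host, credentials and datastore path are placeholders, not values from this deployment.

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials (not this deployment's values).
session = api.VMwareAPISession(
    'vc.example.test', 'user@vsphere.local', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Any datacenter managed-object reference, to scope the file operation.
retrieve_result = vim_util.get_objects(session.vim, 'Datacenter', 1, ['name'])
dc_ref = retrieve_result.objects[0].obj

# Counterpart of the "Invoking FileManager.DeleteDatastoreFile_Task" lines:
# start the asynchronous operation and get back a task reference.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] example-instance-dir', datacenter=dc_ref)

# Counterpart of the "Waiting for the task" / "progress is N%" lines: poll
# every task_poll_interval seconds until the task succeeds or raises.
session.wait_for_task(task)

wait_for_task() raises if the task ends in an error state, which is why every task in this trace is followed by a "completed successfully" line; a failure would surface as an oslo_vmware exception instead.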
[ 1591.880708] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1591.881108] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1591.881237] env[68244]: DEBUG nova.objects.instance [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lazy-loading 'resources' on Instance uuid 7ef698d0-b436-43a0-85d4-210006c37122 {{(pid=68244) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1592.410636] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f9e320-e4a9-404f-9a1c-8f879a9933da {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1592.417986] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e575ae-6eba-4789-bdd4-659dc033cef6 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1592.447258] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11bf429-2250-436d-821d-ce82ebe3947d {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1592.456025] env[68244]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36f47cc-bcba-4504-b485-e7dc96d4f384 {{(pid=68244) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1592.468167] env[68244]: DEBUG nova.compute.provider_tree [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed in ProviderTree for provider: b885cb16-3bd4-46d8-abd9-28a1bf1058e3 {{(pid=68244) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1592.971561] env[68244]: DEBUG nova.scheduler.client.report [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Inventory has not changed for provider b885cb16-3bd4-46d8-abd9-28a1bf1058e3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68244) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
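The 'Acquiring lock "compute_resources"' / 'Lock "compute_resources" acquired' pair above comes from oslo.concurrency's lockutils, which the resource tracker uses to serialize usage updates within the process. A minimal sketch of the same locking primitives, with a placeholder guarded function:

# Sketch of the oslo.concurrency locking that produces the
# "Acquiring lock ... / Lock ... acquired / released" lines above.
# The lock name matches the log; the guarded functions are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Only one thread per process runs this at a time.
    pass


def recalculate():
    pass


# Equivalent context-manager form of the same in-process lock.
with lockutils.lock('compute_resources'):
    recalculate()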
[ 1593.476435] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.595s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1593.492996] env[68244]: INFO nova.scheduler.client.report [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Deleted allocations for instance 7ef698d0-b436-43a0-85d4-210006c37122
[ 1594.004580] env[68244]: DEBUG oslo_concurrency.lockutils [None req-25c4be08-6e78-4601-a8d0-35f95420d8ec tempest-ServerShowV247Test-2082864385 tempest-ServerShowV247Test-2082864385-project-member] Lock "7ef698d0-b436-43a0-85d4-210006c37122" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.309s {{(pid=68244) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
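"Deleted allocations for instance ..." corresponds to the placement API request DELETE /allocations/{consumer_uuid}, which the scheduler report client issues once the instance is gone. The sketch below shows the shape of that request with a placeholder endpoint and token; nova itself goes through a keystoneauth session rather than raw requests.

# Sketch only: removing an instance's placement allocations via
# DELETE /allocations/{consumer_uuid}.  Endpoint, token and microversion
# are placeholders; the instance UUID is the one from the log above.
import requests

PLACEMENT = 'http://placement.example.org/placement'      # placeholder endpoint
TOKEN = 'gAAAA...'                                         # placeholder auth token
instance_uuid = '7ef698d0-b436-43a0-85d4-210006c37122'

resp = requests.delete(
    f'{PLACEMENT}/allocations/{instance_uuid}',
    headers={'X-Auth-Token': TOKEN,
             'OpenStack-API-Version': 'placement 1.28'})   # microversion is optional here
# A 204 No Content response means the allocations were removed.
resp.raise_for_status()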